/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
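
/* For example, put_var_into_stack might rewrite a DECL_RTL such as
   (reg:SI 58) into a frame slot like
   (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int -12)))
   and then fix up every insn already emitted that mentioned the pseudo.
   (Illustrative RTL; the actual offset, mode and base register depend
   on the decl and the target.)  */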
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
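
/* For example, with ALIGN == 8:
     FLOOR_ROUND (13, 8) == 8     FLOOR_ROUND (-13, 8) == -16
     CEIL_ROUND (13, 8) == 16     CEIL_ROUND (16, 8) == 16
   (assuming the usual two's complement representation).  */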
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */

int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));
/* This variable holds a pointer to a function to register any
   data items in the target specific, per-function data structure
   that will need garbage collection.  */
void (*mark_machine_status) PARAMS ((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PARAMS ((struct function *));
void (*save_lang_status) PARAMS ((struct function *));
void (*restore_lang_status) PARAMS ((struct function *));
void (*mark_lang_status) PARAMS ((struct function *));
void (*free_lang_status) PARAMS ((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static varray_type prologue;
static varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static varray_type sibcall_epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
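
/* For example, a temporary allocated at level 2 is normally freed by
   free_temp_slots after the statement that created it; preserving it
   merely relabels it as level 1, so it survives until the enclosing
   level is popped.  (Illustrative levels; see preserve_temp_slots and
   pop_temp_slots below.)  */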
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
                                         int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
                                        enum machine_mode, enum machine_mode,
                                        int, unsigned int, int,
                                        struct hash_table *));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
                                             enum machine_mode,
                                             struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
                                    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
                                          int, int));
static void fixup_var_refs_insns_with_hash
  PARAMS ((struct hash_table *, rtx,
           enum machine_mode, int));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
                                         int, int));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
                                      struct fixup_replacement **));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
                                          struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
                               tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
                                       struct hash_table *));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
                                                         struct hash_table *,
                                                         hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_function_status PARAMS ((struct function *));
static void maybe_mark_struct_function PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
/* Pointer to chain of `struct function' for containing functions.  */
static struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  if (save_lang_status)
    (*save_lang_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
                    queue->unsignedp, 0);

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */
static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* We can't guarantee more alignment than the expected alignment of the
     stack boundary, so cap the request at that.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset
    = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
      + frame_phase;
#else
  function->x_frame_offset
    = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
      + frame_phase;
#endif
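
  /* For example, with STARTING_FRAME_OFFSET == 8 and a 16-byte
     PREFERRED_STACK_BOUNDARY, frame_off == 8 and frame_phase == 8, so on
     a downward-growing frame an offset of -20 becomes
     FLOOR_ROUND (-28, 16) + 8 == -24, which is 16-byte aligned once
     STARTING_FRAME_OFFSET is added back in.  (Illustrative numbers.)  */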
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }

        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_VOLATILE_P (p->slot) = 0;
  set_mem_alias_set (p->slot, 0);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
  set_mem_align (p->slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
      MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
    }

  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
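
/* A typical call site (an illustrative sketch, not code from this file)
   asks for a word-sized scratch slot that dies with the current
   statement:

     rtx scratch = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);

   The returned MEM is usable like any other stack slot and becomes
   available for reuse after the next free_temp_slots ().  */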
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  enum machine_mode mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
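
/* For instance, two free BLKmode slots with
     p->base_offset == 0,  p->full_size == 16,
     q->base_offset == 16, q->full_size == 8
   are merged into a single 24-byte slot, since P ends exactly where Q
   begins.  (Illustrative offsets.)  */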
void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
/* Find the temp slot corresponding to the object at address X.  */
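
/* X may be the slot's own address, a recorded alias for it, or a
   (plus virtual-stack-vars (const_int N)) that lands inside the slot;
   e.g. a slot with base_offset == 16 and full_size == 8 matches any N
   in [16, 24).  (Illustrative offsets.)  */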
static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */
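
/* For example, after
     update_temp_slot_address (XEXP (slot, 0), (reg 70))
   the pseudo (reg 70) is recorded as an alternate address of the slot,
   so a later find_temp_slot_from_address ((reg 70)) will find it.
   (Illustrative register number.)  */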
void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on the remaining values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
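
/* For example, in GNU C:

     int x = ({ struct S s = make_s (); s.field; });

   the temporary holding the statement expression's value must outlive
   the free_temp_slots call for the inner statement, so it is moved up
   one level here.  (Illustrative source; `make_s' and `S' are
   placeholders.)  */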
void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since a slot can be reused
   while generating the same RTL_EXPR, but this is complex and probably
   not worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
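
/* The usual pairing in callers looks like this (an illustrative
   sketch, not code from this file):

     push_temp_slots ();
     ... expand an expression, allocating temporaries ...
     preserve_temp_slots (result);   -- if the value may live in one
     free_temp_slots ();
     pop_temp_slots ();
 */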
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
               || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
         ? SAVE_EXPR_RTL (decl)
         : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
                            decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fix up references to the parts only after we fix up references
         to the whole CONCAT, lest we do double fixups for the latter
         references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = type_for_mode (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
         already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     struct hash_table *ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
          end_sequence ();
        }
    }
}
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
   and X is some part of an insn.  Return a struct fixup_replacement whose
   OLD value is equal to X.  Allocate a new structure if no such entry
   exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the Normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }
      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel);

      insn = next;
    }
}
/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp)
     struct hash_table *ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  struct insns_for_mem_entry *ime = (struct insns_for_mem_entry *)
    hash_lookup (ht, var, /*create=*/0, /*copy=*/0);
  rtx insn_list = ime->insns;

  while (insn_list)
    {
      rtx insn = XEXP (insn_list, 0);

      if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, 1);

      insn_list = XEXP (insn_list, 1);
    }
}
/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */

static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
          || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
              && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
                  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
        /* The REG_LIBCALL note will go away since we are going to
           turn INSN into a NOTE, so just delete the
           corresponding REG_RETVAL note.  */
        remove_note (XEXP (note, 0),
                     find_reg_note (XEXP (note, 0), REG_RETVAL,
                                    NULL_RTX));

      delete_insn (insn);
    }
  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
           && (set = single_set (insn)) != 0
           && SET_DEST (set) == var
           /* If this represents the result of an insn group,
              don't delete the insn.  */
           && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
           && (rtx_equal_p (SET_SRC (set), var)
               || (GET_CODE (SET_SRC (set)) == REG
                   && (prev = prev_nonnote_insn (insn)) != 0
                   && (prev_set = single_set (prev)) != 0
                   && SET_DEST (prev_set) == SET_SRC (set)
                   && rtx_equal_p (SET_SRC (prev_set), var))))
    {
      delete_insn (insn);
    }
  else
    {
      struct fixup_replacement *replacements = 0;
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
        {
          /* If the insn that copies the results of a CALL_INSN
             into a pseudo now references VAR, we have to use an
             intermediate pseudo since we want the life of the
             return value register to be only a single insn.

             If we don't use an intermediate pseudo, such things as
             address computations to make the address of VAR valid
             if it is not can be placed between the CALL_INSN and INSN.

             To make sure this doesn't happen, we record the destination
             of the CALL_INSN and see if the next insn uses both that
             and VAR.  */

          if (call_dest != 0 && GET_CODE (insn) == INSN
              && reg_mentioned_p (var, PATTERN (insn))
              && reg_mentioned_p (call_dest, PATTERN (insn)))
            {
              rtx temp = gen_reg_rtx (GET_MODE (call_dest));

              emit_insn_before (gen_move_insn (temp, call_dest), insn);

              PATTERN (insn) = replace_rtx (PATTERN (insn),
                                            call_dest, temp);
            }

          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == SET)
            call_dest = SET_DEST (PATTERN (insn));
          else if (GET_CODE (insn) == CALL_INSN
                   && GET_CODE (PATTERN (insn)) == PARALLEL
                   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
            call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
          else
            call_dest = 0;
        }

      /* See if we have to do anything to INSN now that VAR is in
         memory.  If it needs to be loaded into a pseudo, use a single
         pseudo for the entire insn in case there is a MATCH_DUP
         between two operands.  We pass a pointer to the head of
         a list of struct fixup_replacements.  If fixup_var_refs_1
         needs to allocate pseudos or replacement MEMs (for SUBREGs),
         it will record them in this list.

         If it allocated a pseudo for any replacement, we copy into
         it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                        &replacements);

      /* If this is last_parm_insn, and any instructions were output
         after it to fix it up, then we must set last_parm_insn to
         the last such instruction emitted.  */
      if (insn == last_parm_insn)
        last_parm_insn = PREV_INSN (next_insn);

      while (replacements)
        {
          struct fixup_replacement *next;

          if (GET_CODE (replacements->new) == REG)
            {
              rtx insert_before;
              rtx seq;

              /* OLD might be a (subreg (mem)).  */
              if (GET_CODE (replacements->old) == SUBREG)
                replacements->old
                  = fixup_memory_subreg (replacements->old, insn, 0);
              else
                replacements->old
                  = fixup_stack_1 (replacements->old, insn);

              insert_before = insn;

              /* If we are changing the mode, do a conversion.
                 This might be wasteful, but combine.c will
                 eliminate much of the waste.  */

              if (GET_MODE (replacements->new)
                  != GET_MODE (replacements->old))
                {
                  start_sequence ();
                  convert_move (replacements->new,
                                replacements->old, unsignedp);
                  seq = gen_sequence ();
                  end_sequence ();
                }
              else
                seq = gen_move_insn (replacements->new,
                                     replacements->old);

              emit_insn_before (seq, insert_before);
            }

          next = replacements->next;
          free (replacements);
          replacements = next;
        }
    }

  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
     But don't touch other insns referred to by reg-notes;
     we will get them elsewhere.  */
  while (note)
    {
      if (GET_CODE (note) != INSN_LIST)
        XEXP (note, 0)
          = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
      note = XEXP (note, 1);
    }
}
1868 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1869 See if the rtx expression at *LOC in INSN needs to be changed.
1871 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1872 contain a list of original rtx's and replacements. If we find that we need
1873 to modify this insn by replacing a memory reference with a pseudo or by
1874 making a new MEM to implement a SUBREG, we consult that list to see if
1875 we have already chosen a replacement. If none has already been allocated,
1876 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1877 or the SUBREG, as appropriate, to the pseudo. */
1879 static void
1880 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1881 rtx var;
1882 enum machine_mode promoted_mode;
1883 rtx *loc;
1884 rtx insn;
1885 struct fixup_replacement **replacements;
1887 int i;
1888 rtx x = *loc;
1889 RTX_CODE code = GET_CODE (x);
1890 const char *fmt;
1891 rtx tem, tem1;
1892 struct fixup_replacement *replacement;
1894 switch (code)
1896 case ADDRESSOF:
1897 if (XEXP (x, 0) == var)
1899 /* Prevent sharing of rtl that might lose. */
1900 rtx sub = copy_rtx (XEXP (var, 0));
1902 if (! validate_change (insn, loc, sub, 0))
1904 rtx y = gen_reg_rtx (GET_MODE (sub));
1905 rtx seq, new_insn;
1907 /* We should be able to replace with a register or all is lost.
1908 Note that we can't use validate_change to verify this, since
1909 it doesn't take care of replacing all dups simultaneously. */
1910 if (! validate_replace_rtx (*loc, y, insn))
1911 abort ();
1913 /* Careful! First try to recognize a direct move of the
1914 value, mimicking how things are done in gen_reload wrt
1915 PLUS. Consider what happens when insn is a conditional
1916 move instruction and addsi3 clobbers flags. */
1918 start_sequence ();
1919 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1920 seq = gen_sequence ();
1921 end_sequence ();
1923 if (recog_memoized (new_insn) < 0)
1925 /* That failed. Fall back on force_operand and hope. */
1927 start_sequence ();
1928 sub = force_operand (sub, y);
1929 if (sub != y)
1930 emit_insn (gen_move_insn (y, sub));
1931 seq = gen_sequence ();
1932 end_sequence ();
1935 #ifdef HAVE_cc0
1936 /* Don't separate setter from user. */
1937 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1938 insn = PREV_INSN (insn);
1939 #endif
1941 emit_insn_before (seq, insn);
1944 return;
1946 case MEM:
1947 if (var == x)
1949 /* If we already have a replacement, use it. Otherwise,
1950 try to fix up this address in case it is invalid. */
1952 replacement = find_fixup_replacement (replacements, var);
1953 if (replacement->new)
1955 *loc = replacement->new;
1956 return;
1959 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1961 /* Unless we are forcing memory to register or we changed the mode,
1962 we can leave things the way they are if the insn is valid. */
1964 INSN_CODE (insn) = -1;
1965 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1966 && recog_memoized (insn) >= 0)
1967 return;
1969 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1970 return;
1973 /* If X contains VAR, we need to unshare it here so that we update
1974 each occurrence separately. But all identical MEMs in one insn
1975 must be replaced with the same rtx because of the possibility of
1976 MATCH_DUPs. */
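/* For instance, if INSN matches both operands of a pattern to the
   same MEM through a MATCH_DUP, as in this sketch,

     (set (reg:SI 100) (plus:SI <MEM containing VAR> <MEM containing VAR>))

   both operand positions must refer to one shared rtx so that a
   single replacement fixes every duplicate at once.  */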
1978 if (reg_mentioned_p (var, x))
1980 replacement = find_fixup_replacement (replacements, x);
1981 if (replacement->new == 0)
1982 replacement->new = copy_most_rtx (x, var);
1984 *loc = x = replacement->new;
1985 code = GET_CODE (x);
1987 break;
1989 case REG:
1990 case CC0:
1991 case PC:
1992 case CONST_INT:
1993 case CONST:
1994 case SYMBOL_REF:
1995 case LABEL_REF:
1996 case CONST_DOUBLE:
1997 case CONST_VECTOR:
1998 return;
2000 case SIGN_EXTRACT:
2001 case ZERO_EXTRACT:
2002 /* Note that in some cases those types of expressions are altered
2003 by optimize_bit_field, and do not survive to get here. */
2004 if (XEXP (x, 0) == var
2005 || (GET_CODE (XEXP (x, 0)) == SUBREG
2006 && SUBREG_REG (XEXP (x, 0)) == var))
2008 /* Get TEM as a valid MEM in the mode presently in the insn.
2010 We don't worry about the possibility of MATCH_DUP here; it
2011 is highly unlikely and would be tricky to handle. */
2013 tem = XEXP (x, 0);
2014 if (GET_CODE (tem) == SUBREG)
2016 if (GET_MODE_BITSIZE (GET_MODE (tem))
2017 > GET_MODE_BITSIZE (GET_MODE (var)))
2019 replacement = find_fixup_replacement (replacements, var);
2020 if (replacement->new == 0)
2021 replacement->new = gen_reg_rtx (GET_MODE (var));
2022 SUBREG_REG (tem) = replacement->new;
2024 /* The following code works only if we have a MEM, so we
2025 need to handle the subreg here. We directly substitute
2026 it assuming that a subreg must be OK here. We already
2027 scheduled a replacement to copy the mem into the
2028 subreg. */
2029 XEXP (x, 0) = tem;
2030 return;
2032 else
2033 tem = fixup_memory_subreg (tem, insn, 0);
2035 else
2036 tem = fixup_stack_1 (tem, insn);
2038 /* Unless we want to load from memory, get TEM into the proper mode
2039 for an extract from memory. This can only be done if the
2040 extract is at a constant position and length. */
2042 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2043 && GET_CODE (XEXP (x, 2)) == CONST_INT
2044 && ! mode_dependent_address_p (XEXP (tem, 0))
2045 && ! MEM_VOLATILE_P (tem))
2047 enum machine_mode wanted_mode = VOIDmode;
2048 enum machine_mode is_mode = GET_MODE (tem);
2049 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2051 if (GET_CODE (x) == ZERO_EXTRACT)
2053 enum machine_mode new_mode
2054 = mode_for_extraction (EP_extzv, 1);
2055 if (new_mode != MAX_MACHINE_MODE)
2056 wanted_mode = new_mode;
2058 else if (GET_CODE (x) == SIGN_EXTRACT)
2060 enum machine_mode new_mode
2061 = mode_for_extraction (EP_extv, 1);
2062 if (new_mode != MAX_MACHINE_MODE)
2063 wanted_mode = new_mode;
2066 /* If we have a narrower mode, we can do something. */
2067 if (wanted_mode != VOIDmode
2068 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2070 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2071 rtx old_pos = XEXP (x, 2);
2072 rtx newmem;
2074 /* If the bytes and bits are counted differently, we
2075 must adjust the offset. */
2076 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2077 offset = (GET_MODE_SIZE (is_mode)
2078 - GET_MODE_SIZE (wanted_mode) - offset);
2080 pos %= GET_MODE_BITSIZE (wanted_mode);
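/* Worked example of the adjustment above (values are hypothetical):
   for is_mode = SImode (4 bytes), wanted_mode = QImode (1 byte)
   and pos = 8, offset starts as 8 / 8 = 1; when BYTES_BIG_ENDIAN
   and BITS_BIG_ENDIAN disagree it becomes 4 - 1 - 1 = 2; and pos
   is reduced to 8 % 8 = 0 within the narrower reference.  */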
2082 newmem = adjust_address_nv (tem, wanted_mode, offset);
2084 /* Make the change and see if the insn remains valid. */
2085 INSN_CODE (insn) = -1;
2086 XEXP (x, 0) = newmem;
2087 XEXP (x, 2) = GEN_INT (pos);
2089 if (recog_memoized (insn) >= 0)
2090 return;
2092 /* Otherwise, restore old position. XEXP (x, 0) will be
2093 restored later. */
2094 XEXP (x, 2) = old_pos;
2098 /* If we get here, the bitfield extract insn can't accept a memory
2099 reference. Copy the input into a register. */
2101 tem1 = gen_reg_rtx (GET_MODE (tem));
2102 emit_insn_before (gen_move_insn (tem1, tem), insn);
2103 XEXP (x, 0) = tem1;
2104 return;
2106 break;
2108 case SUBREG:
2109 if (SUBREG_REG (x) == var)
2111 /* If this is a special SUBREG made because VAR was promoted
2112 from a wider mode, replace it with VAR and call ourself
2113 recursively, this time saying that the object previously
2114 had its current mode (by virtue of the SUBREG). */
2116 if (SUBREG_PROMOTED_VAR_P (x))
2118 *loc = var;
2119 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2120 return;
2123 /* If this SUBREG makes VAR wider, it has become a paradoxical
2124 SUBREG with VAR in memory, but these aren't allowed at this
2125 stage of the compilation. So load VAR into a pseudo and take
2126 a SUBREG of that pseudo. */
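/* Sketch: once VAR is (mem:SI ...), a use such as

     (subreg:DI (mem:SI <VAR>) 0)

   is rewritten as (subreg:DI (reg:SI <new pseudo>) 0); the copy of
   VAR into the new pseudo is emitted by our caller when it walks
   the replacement list.  */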
2127 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2129 replacement = find_fixup_replacement (replacements, var);
2130 if (replacement->new == 0)
2131 replacement->new = gen_reg_rtx (promoted_mode);
2132 SUBREG_REG (x) = replacement->new;
2133 return;
2136 /* See if we have already found a replacement for this SUBREG.
2137 If so, use it. Otherwise, make a MEM and see if the insn
2138 is recognized. If not, or if we should force MEM into a register,
2139 make a pseudo for this SUBREG. */
2140 replacement = find_fixup_replacement (replacements, x);
2141 if (replacement->new)
2143 *loc = replacement->new;
2144 return;
2147 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2149 INSN_CODE (insn) = -1;
2150 if (! flag_force_mem && recog_memoized (insn) >= 0)
2151 return;
2153 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2154 return;
2156 break;
2158 case SET:
2159 /* First do special simplification of bit-field references. */
2160 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2161 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2162 optimize_bit_field (x, insn, 0);
2163 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2164 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2165 optimize_bit_field (x, insn, 0);
2167 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2168 into a register and then store it back out. */
2169 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2170 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2171 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2172 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2173 > GET_MODE_SIZE (GET_MODE (var))))
2175 replacement = find_fixup_replacement (replacements, var);
2176 if (replacement->new == 0)
2177 replacement->new = gen_reg_rtx (GET_MODE (var));
2179 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2180 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2183 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2184 insn into a pseudo and store the low part of the pseudo into VAR. */
2185 if (GET_CODE (SET_DEST (x)) == SUBREG
2186 && SUBREG_REG (SET_DEST (x)) == var
2187 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2188 > GET_MODE_SIZE (GET_MODE (var))))
2190 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2191 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2192 tem)),
2193 insn);
2194 break;
2198 rtx dest = SET_DEST (x);
2199 rtx src = SET_SRC (x);
2200 rtx outerdest = dest;
2202 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2203 || GET_CODE (dest) == SIGN_EXTRACT
2204 || GET_CODE (dest) == ZERO_EXTRACT)
2205 dest = XEXP (dest, 0);
2207 if (GET_CODE (src) == SUBREG)
2208 src = SUBREG_REG (src);
2210 /* If VAR does not appear at the top level of the SET
2211 just scan the lower levels of the tree. */
2213 if (src != var && dest != var)
2214 break;
2216 /* We will need to rerecognize this insn. */
2217 INSN_CODE (insn) = -1;
2219 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2220 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2222 /* Since this case will return, ensure we fixup all the
2223 operands here. */
2224 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2225 insn, replacements);
2226 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2227 insn, replacements);
2228 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2229 insn, replacements);
2231 tem = XEXP (outerdest, 0);
2233 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2234 that may appear inside a ZERO_EXTRACT.
2235 This was legitimate when the MEM was a REG. */
2236 if (GET_CODE (tem) == SUBREG
2237 && SUBREG_REG (tem) == var)
2238 tem = fixup_memory_subreg (tem, insn, 0);
2239 else
2240 tem = fixup_stack_1 (tem, insn);
2242 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2243 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2244 && ! mode_dependent_address_p (XEXP (tem, 0))
2245 && ! MEM_VOLATILE_P (tem))
2247 enum machine_mode wanted_mode;
2248 enum machine_mode is_mode = GET_MODE (tem);
2249 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2251 wanted_mode = mode_for_extraction (EP_insv, 0);
2253 /* If we have a narrower mode, we can do something. */
2254 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2256 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2257 rtx old_pos = XEXP (outerdest, 2);
2258 rtx newmem;
2260 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2261 offset = (GET_MODE_SIZE (is_mode)
2262 - GET_MODE_SIZE (wanted_mode) - offset);
2264 pos %= GET_MODE_BITSIZE (wanted_mode);
2266 newmem = adjust_address_nv (tem, wanted_mode, offset);
2268 /* Make the change and see if the insn remains valid. */
2269 INSN_CODE (insn) = -1;
2270 XEXP (outerdest, 0) = newmem;
2271 XEXP (outerdest, 2) = GEN_INT (pos);
2273 if (recog_memoized (insn) >= 0)
2274 return;
2276 /* Otherwise, restore old position. XEXP (x, 0) will be
2277 restored later. */
2278 XEXP (outerdest, 2) = old_pos;
2282 /* If we get here, the bit-field store doesn't allow memory
2283 or isn't located at a constant position. Load the value into
2284 a register, do the store, and put it back into memory. */
2286 tem1 = gen_reg_rtx (GET_MODE (tem));
2287 emit_insn_before (gen_move_insn (tem1, tem), insn);
2288 emit_insn_after (gen_move_insn (tem, tem1), insn);
2289 XEXP (outerdest, 0) = tem1;
2290 return;
2293 /* STRICT_LOW_PART is a no-op on memory references
2294 and it can cause combinations to be unrecognizable,
2295 so eliminate it. */
2297 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2298 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2300 /* A valid insn to copy VAR into or out of a register
2301 must be left alone, to avoid an infinite loop here.
2302 If the reference to VAR is by a subreg, fix that up,
2303 since SUBREG is not valid for a memref.
2304 Also fix up the address of the stack slot.
2306 Note that we must not try to recognize the insn until
2307 after we know that we have valid addresses and no
2308 (subreg (mem ...) ...) constructs, since these interfere
2309 with determining the validity of the insn. */
2311 if ((SET_SRC (x) == var
2312 || (GET_CODE (SET_SRC (x)) == SUBREG
2313 && SUBREG_REG (SET_SRC (x)) == var))
2314 && (GET_CODE (SET_DEST (x)) == REG
2315 || (GET_CODE (SET_DEST (x)) == SUBREG
2316 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2317 && GET_MODE (var) == promoted_mode
2318 && x == single_set (insn))
2320 rtx pat, last;
2322 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2323 if (replacement->new)
2324 SET_SRC (x) = replacement->new;
2325 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2326 SET_SRC (x) = replacement->new
2327 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2328 else
2329 SET_SRC (x) = replacement->new
2330 = fixup_stack_1 (SET_SRC (x), insn);
2332 if (recog_memoized (insn) >= 0)
2333 return;
2335 /* INSN is not valid, but we know that we want to
2336 copy SET_SRC (x) to SET_DEST (x) in some way. So
2337 we generate the move and see whether it requires more
2338 than one insn. If it does, we emit those insns and
2339 delete INSN. Otherwise, we can just replace the pattern
2340 of INSN; we have already verified above that INSN has
2341 no other function than to do X. */
2343 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2344 if (GET_CODE (pat) == SEQUENCE)
2346 last = emit_insn_before (pat, insn);
2348 /* INSN might have REG_RETVAL or other important notes, so
2349 we need to store the pattern of the last insn in the
2350 sequence into INSN similarly to the normal case. LAST
2351 should not have REG_NOTES, but we allow them if INSN has
2352 no REG_NOTES. */
2353 if (REG_NOTES (last) && REG_NOTES (insn))
2354 abort ();
2355 if (REG_NOTES (last))
2356 REG_NOTES (insn) = REG_NOTES (last);
2357 PATTERN (insn) = PATTERN (last);
2359 delete_insn (last);
2361 else
2362 PATTERN (insn) = pat;
2364 return;
2367 if ((SET_DEST (x) == var
2368 || (GET_CODE (SET_DEST (x)) == SUBREG
2369 && SUBREG_REG (SET_DEST (x)) == var))
2370 && (GET_CODE (SET_SRC (x)) == REG
2371 || (GET_CODE (SET_SRC (x)) == SUBREG
2372 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2373 && GET_MODE (var) == promoted_mode
2374 && x == single_set (insn))
2376 rtx pat, last;
2378 if (GET_CODE (SET_DEST (x)) == SUBREG)
2379 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2380 else
2381 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2383 if (recog_memoized (insn) >= 0)
2384 return;
2386 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2387 if (GET_CODE (pat) == SEQUENCE)
2389 last = emit_insn_before (pat, insn);
2391 /* INSN might have REG_RETVAL or other important notes, so
2392 we need to store the pattern of the last insn in the
2393 sequence into INSN similarly to the normal case. LAST
2394 should not have REG_NOTES, but we allow them if INSN has
2395 no REG_NOTES. */
2396 if (REG_NOTES (last) && REG_NOTES (insn))
2397 abort ();
2398 if (REG_NOTES (last))
2399 REG_NOTES (insn) = REG_NOTES (last);
2400 PATTERN (insn) = PATTERN (last);
2402 delete_insn (last);
2404 else
2405 PATTERN (insn) = pat;
2407 return;
2410 /* Otherwise, storing into VAR must be handled specially
2411 by storing into a temporary and copying that into VAR
2412 with a new insn after this one. Note that this case
2413 will be used when storing into a promoted scalar since
2414 the insn will now have different modes on the input
2415 and output and hence will be invalid (except for the case
2416 of setting it to a constant, which does not need any
2417 change if it is valid). We generate extra code in that case,
2418 but combine.c will eliminate it. */
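/* Sketch for a promoted scalar (modes are illustrative): with VAR
   now (mem:QI ...) but the insn still computing in SImode,

     (set <VAR> (plus:SI ...))

   becomes

     (set (reg:SI <temp>) (plus:SI ...))

   followed, after INSN, by a move of the low part of <temp> into
   the QImode memory slot.  */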
2420 if (dest == var)
2422 rtx temp;
2423 rtx fixeddest = SET_DEST (x);
2425 /* STRICT_LOW_PART can be discarded around a MEM. */
2426 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2427 fixeddest = XEXP (fixeddest, 0);
2428 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2429 if (GET_CODE (fixeddest) == SUBREG)
2431 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2432 promoted_mode = GET_MODE (fixeddest);
2434 else
2435 fixeddest = fixup_stack_1 (fixeddest, insn);
2437 temp = gen_reg_rtx (promoted_mode);
2439 emit_insn_after (gen_move_insn (fixeddest,
2440 gen_lowpart (GET_MODE (fixeddest),
2441 temp)),
2442 insn);
2444 SET_DEST (x) = temp;
2448 default:
2449 break;
2452 /* Nothing special about this RTX; fix its operands. */
2454 fmt = GET_RTX_FORMAT (code);
2455 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2457 if (fmt[i] == 'e')
2458 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2459 else if (fmt[i] == 'E')
2461 int j;
2462 for (j = 0; j < XVECLEN (x, i); j++)
2463 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2464 insn, replacements);
2469 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2470 return an rtx (MEM:m1 newaddr) which is equivalent.
2471 If any insns must be emitted to compute NEWADDR, put them before INSN.
2473 UNCRITICAL nonzero means accept paradoxical subregs.
2474 This is used for subregs found inside REG_NOTES. */
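/* For example (a sketch; the byte offset comes from SUBREG_BYTE):

     (subreg:QI (mem:SI (reg <addr>)) 3)

   becomes

     (mem:QI (plus (reg <addr>) (const_int 3)))

   with any insns needed to compute the new address emitted before
   INSN.  */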
2476 static rtx
2477 fixup_memory_subreg (x, insn, uncritical)
2478 rtx x;
2479 rtx insn;
2480 int uncritical;
2482 int offset = SUBREG_BYTE (x);
2483 rtx addr = XEXP (SUBREG_REG (x), 0);
2484 enum machine_mode mode = GET_MODE (x);
2485 rtx result;
2487 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2488 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2489 && ! uncritical)
2490 abort ();
2492 if (!flag_force_addr
2493 && memory_address_p (mode, plus_constant (addr, offset)))
2494 /* Shortcut if no insns need be emitted. */
2495 return adjust_address (SUBREG_REG (x), mode, offset);
2497 start_sequence ();
2498 result = adjust_address (SUBREG_REG (x), mode, offset);
2499 emit_insn_before (gen_sequence (), insn);
2500 end_sequence ();
2501 return result;
2504 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2505 Replace subexpressions of X in place.
2506 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2507 Otherwise return X, with its contents possibly altered.
2509 If any insns must be emitted to compute NEWADDR, put them before INSN.
2511 UNCRITICAL is as in fixup_memory_subreg. */
2513 static rtx
2514 walk_fixup_memory_subreg (x, insn, uncritical)
2515 rtx x;
2516 rtx insn;
2517 int uncritical;
2519 enum rtx_code code;
2520 const char *fmt;
2521 int i;
2523 if (x == 0)
2524 return 0;
2526 code = GET_CODE (x);
2528 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2529 return fixup_memory_subreg (x, insn, uncritical);
2531 /* Nothing special about this RTX; fix its operands. */
2533 fmt = GET_RTX_FORMAT (code);
2534 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2536 if (fmt[i] == 'e')
2537 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2538 else if (fmt[i] == 'E')
2540 int j;
2541 for (j = 0; j < XVECLEN (x, i); j++)
2542 XVECEXP (x, i, j)
2543 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2546 return x;
2549 /* For each memory ref within X, if it refers to a stack slot
2550 with an out of range displacement, put the address in a temp register
2551 (emitting new insns before INSN to load these registers)
2552 and alter the memory ref to use that register.
2553 Replace each such MEM rtx with a copy, to avoid clobberage. */
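/* Sketch, with a hypothetical out-of-range displacement: the ref

     (mem:SI (plus (reg fp) (const_int 100000)))

   becomes (mem:SI (reg <tmp>)), where the insn
   (set (reg <tmp>) (plus (reg fp) (const_int 100000)))
   is emitted before INSN.  */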
2555 static rtx
2556 fixup_stack_1 (x, insn)
2557 rtx x;
2558 rtx insn;
2560 int i;
2561 RTX_CODE code = GET_CODE (x);
2562 const char *fmt;
2564 if (code == MEM)
2566 rtx ad = XEXP (x, 0);
2567 /* If we have address of a stack slot but it's not valid
2568 (displacement is too large), compute the sum in a register. */
2569 if (GET_CODE (ad) == PLUS
2570 && GET_CODE (XEXP (ad, 0)) == REG
2571 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2572 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2573 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2574 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2575 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2576 #endif
2577 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2578 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2579 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2580 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2582 rtx temp, seq;
2583 if (memory_address_p (GET_MODE (x), ad))
2584 return x;
2586 start_sequence ();
2587 temp = copy_to_reg (ad);
2588 seq = gen_sequence ();
2589 end_sequence ();
2590 emit_insn_before (seq, insn);
2591 return replace_equiv_address (x, temp);
2593 return x;
2596 fmt = GET_RTX_FORMAT (code);
2597 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2599 if (fmt[i] == 'e')
2600 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2601 else if (fmt[i] == 'E')
2603 int j;
2604 for (j = 0; j < XVECLEN (x, i); j++)
2605 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2608 return x;
2611 /* Optimization: a bit-field instruction whose field
2612 happens to be a byte or halfword in memory
2613 can be changed to a move instruction.
2615 We call here when INSN is an insn to examine or store into a bit-field.
2616 BODY is the SET-rtx to be altered.
2618 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2619 (Currently this is called only from function.c, and EQUIV_MEM
2620 is always 0.) */
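/* For example, on a little-endian target (a sketch):

     (set (zero_extract:SI (mem:SI <addr>) (const_int 8) (const_int 8))
          (reg:SI 100))

   stores a byte-aligned 8-bit field and can become the plain move

     (set (mem:QI (plus <addr> (const_int 1)))
          (subreg:QI (reg:SI 100) 0)).  */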
2622 static void
2623 optimize_bit_field (body, insn, equiv_mem)
2624 rtx body;
2625 rtx insn;
2626 rtx *equiv_mem;
2628 rtx bitfield;
2629 int destflag;
2630 rtx seq = 0;
2631 enum machine_mode mode;
2633 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2634 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2635 bitfield = SET_DEST (body), destflag = 1;
2636 else
2637 bitfield = SET_SRC (body), destflag = 0;
2639 /* First check that the field being stored has constant size and position
2640 and is in fact a byte or halfword suitably aligned. */
2642 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2643 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2644 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2645 != BLKmode)
2646 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2648 rtx memref = 0;
2650 /* Now check that the containing word is memory, not a register,
2651 and that it is safe to change the machine mode. */
2653 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2654 memref = XEXP (bitfield, 0);
2655 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2656 && equiv_mem != 0)
2657 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2658 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2659 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2660 memref = SUBREG_REG (XEXP (bitfield, 0));
2661 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2662 && equiv_mem != 0
2663 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2664 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2666 if (memref
2667 && ! mode_dependent_address_p (XEXP (memref, 0))
2668 && ! MEM_VOLATILE_P (memref))
2670 /* Now adjust the address, first for any subreg'ing
2671 that we are now getting rid of,
2672 and then for which byte of the word is wanted. */
2674 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2675 rtx insns;
2677 /* Adjust OFFSET to count bits from low-address byte. */
2678 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2679 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2680 - offset - INTVAL (XEXP (bitfield, 1)));
2682 /* Adjust OFFSET to count bytes from low-address byte. */
2683 offset /= BITS_PER_UNIT;
2684 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2686 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2687 / UNITS_PER_WORD) * UNITS_PER_WORD;
2688 if (BYTES_BIG_ENDIAN)
2689 offset -= (MIN (UNITS_PER_WORD,
2690 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2691 - MIN (UNITS_PER_WORD,
2692 GET_MODE_SIZE (GET_MODE (memref))));
2695 start_sequence ();
2696 memref = adjust_address (memref, mode, offset);
2697 insns = get_insns ();
2698 end_sequence ();
2699 emit_insns_before (insns, insn);
2701 /* Store this memory reference where
2702 we found the bit field reference. */
2704 if (destflag)
2706 validate_change (insn, &SET_DEST (body), memref, 1);
2707 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2709 rtx src = SET_SRC (body);
2710 while (GET_CODE (src) == SUBREG
2711 && SUBREG_BYTE (src) == 0)
2712 src = SUBREG_REG (src);
2713 if (GET_MODE (src) != GET_MODE (memref))
2714 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2715 validate_change (insn, &SET_SRC (body), src, 1);
2717 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2718 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2719 /* This shouldn't happen because anything that didn't have
2720 one of these modes should have got converted explicitly
2721 and then referenced through a subreg.
2722 This is so because the original bit-field was
2723 handled by agg_mode and so its tree structure had
2724 the same mode that memref now has. */
2725 abort ();
2727 else
2729 rtx dest = SET_DEST (body);
2731 while (GET_CODE (dest) == SUBREG
2732 && SUBREG_BYTE (dest) == 0
2733 && (GET_MODE_CLASS (GET_MODE (dest))
2734 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2735 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2736 <= UNITS_PER_WORD))
2737 dest = SUBREG_REG (dest);
2739 validate_change (insn, &SET_DEST (body), dest, 1);
2741 if (GET_MODE (dest) == GET_MODE (memref))
2742 validate_change (insn, &SET_SRC (body), memref, 1);
2743 else
2745 /* Convert the mem ref to the destination mode. */
2746 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2748 start_sequence ();
2749 convert_move (newreg, memref,
2750 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2751 seq = get_insns ();
2752 end_sequence ();
2754 validate_change (insn, &SET_SRC (body), newreg, 1);
2758 /* See if we can convert this extraction or insertion into
2759 a simple move insn. We might not be able to do so if this
2760 was, for example, part of a PARALLEL.
2762 If we succeed, write out any needed conversions. If we fail,
2763 it is hard to guess why we failed, so don't do anything
2764 special; just let the optimization be suppressed. */
2766 if (apply_change_group () && seq)
2767 emit_insns_before (seq, insn);
2772 /* These routines are responsible for converting virtual register references
2773 to the actual hard register references once RTL generation is complete.
2775 The following four variables are used for communication between the
2776 routines. They contain the offsets of the virtual registers from their
2777 respective hard registers. */
2779 static int in_arg_offset;
2780 static int var_offset;
2781 static int dynamic_offset;
2782 static int out_arg_offset;
2783 static int cfa_offset;
2785 /* In most machines, the stack pointer register is equivalent to the bottom
2786 of the stack. */
2788 #ifndef STACK_POINTER_OFFSET
2789 #define STACK_POINTER_OFFSET 0
2790 #endif
2792 /* If not defined, pick an appropriate default for the offset of dynamically
2793 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2794 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2796 #ifndef STACK_DYNAMIC_OFFSET
2798 /* The bottom of the stack points to the actual arguments. If
2799 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2800 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2801 stack space for register parameters is not pushed by the caller, but
2802 rather part of the fixed stack areas and hence not included in
2803 `current_function_outgoing_args_size'. Nevertheless, we must allow
2804 for it when allocating stack dynamic objects. */
2806 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2807 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2808 ((ACCUMULATE_OUTGOING_ARGS \
2809 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2810 + (STACK_POINTER_OFFSET)) \
2812 #else
2813 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2814 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2815 + (STACK_POINTER_OFFSET))
2816 #endif
2817 #endif
2819 /* On most machines, the CFA coincides with the first incoming parm. */
2821 #ifndef ARG_POINTER_CFA_OFFSET
2822 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2823 #endif
2825 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had its
2826 address taken. DECL is the decl or SAVE_EXPR for the object stored in the
2827 register, for later use if we do need to force REG into the stack. REG is
2828 overwritten by the MEM like in put_reg_into_stack. */
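/* Schematically, the result overlaying REG is

     (mem:<decl's mode> (addressof:P (reg:<reg's mode> <fresh pseudo>)
                                     <original regno> <decl>))

   so existing references to REG now see a MEM whose address is the
   not-yet-materialized ADDRESSOF.  */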
2830 rtx
2831 gen_mem_addressof (reg, decl)
2832 rtx reg;
2833 tree decl;
2835 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2836 REGNO (reg), decl);
2838 /* Calculate this before we start messing with decl's RTL. */
2839 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2841 /* If the original REG was a user-variable, then so is the REG whose
2842 address is being taken. Likewise for unchanging. */
2843 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2844 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2846 PUT_CODE (reg, MEM);
2847 MEM_ATTRS (reg) = 0;
2848 XEXP (reg, 0) = r;
2850 if (decl)
2852 tree type = TREE_TYPE (decl);
2853 enum machine_mode decl_mode
2854 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2855 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2856 : DECL_RTL_IF_SET (decl));
2858 PUT_MODE (reg, decl_mode);
2860 /* Clear DECL_RTL momentarily so functions below will work
2861 properly, then set it again. */
2862 if (DECL_P (decl) && decl_rtl == reg)
2863 SET_DECL_RTL (decl, 0);
2865 set_mem_attributes (reg, decl, 1);
2866 set_mem_alias_set (reg, set);
2868 if (DECL_P (decl) && decl_rtl == reg)
2869 SET_DECL_RTL (decl, reg);
2871 if (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0))
2872 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2874 else
2875 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2877 return reg;
2880 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2882 void
2883 flush_addressof (decl)
2884 tree decl;
2886 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2887 && DECL_RTL (decl) != 0
2888 && GET_CODE (DECL_RTL (decl)) == MEM
2889 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2890 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2891 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2894 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2896 static void
2897 put_addressof_into_stack (r, ht)
2898 rtx r;
2899 struct hash_table *ht;
2901 tree decl, type;
2902 int volatile_p, used_p;
2904 rtx reg = XEXP (r, 0);
2906 if (GET_CODE (reg) != REG)
2907 abort ();
2909 decl = ADDRESSOF_DECL (r);
2910 if (decl)
2912 type = TREE_TYPE (decl);
2913 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2914 && TREE_THIS_VOLATILE (decl));
2915 used_p = (TREE_USED (decl)
2916 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2918 else
2920 type = NULL_TREE;
2921 volatile_p = 0;
2922 used_p = 1;
2925 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2926 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2929 /* List of replacements made below in purge_addressof_1 when creating
2930 bitfield insertions. */
2931 static rtx purge_bitfield_addressof_replacements;
2933 /* List of replacements made below in purge_addressof_1 for patterns
2934 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2935 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2936 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2937 enough in complex cases, e.g. when some field values can be
2938 extracted by using a MEM with a narrower mode. */
2939 static rtx purge_addressof_replacements;
2941 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2942 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2943 the stack. If the function returns FALSE then the replacement could not
2944 be made. */
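/* In the simplest case the collapse is direct; a sketch with a
   hypothetical pseudo:

     (mem:SI (addressof:SI (reg:SI 100) ...))   -->   (reg:SI 100)

   The harder cases below handle mode mismatches by mirroring the
   memory access with explicit bit-field extracts and insertions.  */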
2946 static bool
2947 purge_addressof_1 (loc, insn, force, store, ht)
2948 rtx *loc;
2949 rtx insn;
2950 int force, store;
2951 struct hash_table *ht;
2953 rtx x;
2954 RTX_CODE code;
2955 int i, j;
2956 const char *fmt;
2957 bool result = true;
2959 /* Re-start here to avoid recursion in common cases. */
2960 restart:
2962 x = *loc;
2963 if (x == 0)
2964 return true;
2966 code = GET_CODE (x);
2968 /* If we don't return in any of the cases below, we will recurse inside
2969 the RTX, which will normally result in any ADDRESSOF being forced into
2970 memory. */
2971 if (code == SET)
2973 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2974 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2975 return result;
2977 else if (code == ADDRESSOF)
2979 rtx sub, insns;
2981 if (GET_CODE (XEXP (x, 0)) != MEM)
2983 put_addressof_into_stack (x, ht);
2984 return true;
2987 /* We must create a copy of the rtx because it was created by
2988 overwriting a REG rtx which is always shared. */
2989 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2990 if (validate_change (insn, loc, sub, 0)
2991 || validate_replace_rtx (x, sub, insn))
2992 return true;
2994 start_sequence ();
2995 sub = force_operand (sub, NULL_RTX);
2996 if (! validate_change (insn, loc, sub, 0)
2997 && ! validate_replace_rtx (x, sub, insn))
2998 abort ();
3000 insns = gen_sequence ();
3001 end_sequence ();
3002 emit_insn_before (insns, insn);
3003 return true;
3006 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3008 rtx sub = XEXP (XEXP (x, 0), 0);
3010 if (GET_CODE (sub) == MEM)
3011 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3012 else if (GET_CODE (sub) == REG
3013 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3015 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3017 int size_x, size_sub;
3019 if (!insn)
3021 /* When processing REG_NOTES look at the list of
3022 replacements done on the insn to find the register that X
3023 was replaced by. */
3024 rtx tem;
3026 for (tem = purge_bitfield_addressof_replacements;
3027 tem != NULL_RTX;
3028 tem = XEXP (XEXP (tem, 1), 1))
3029 if (rtx_equal_p (x, XEXP (tem, 0)))
3031 *loc = XEXP (XEXP (tem, 1), 0);
3032 return true;
3035 /* See comment for purge_addressof_replacements. */
3036 for (tem = purge_addressof_replacements;
3037 tem != NULL_RTX;
3038 tem = XEXP (XEXP (tem, 1), 1))
3039 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3041 rtx z = XEXP (XEXP (tem, 1), 0);
3043 if (GET_MODE (x) == GET_MODE (z)
3044 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3045 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3046 abort ();
3048 /* It can happen that the note may speak of things
3049 in a wider (or just different) mode than the
3050 code did. This is especially true of
3051 REG_RETVAL. */
3053 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3054 z = SUBREG_REG (z);
3056 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3057 && (GET_MODE_SIZE (GET_MODE (x))
3058 > GET_MODE_SIZE (GET_MODE (z))))
3060 /* This can occur as a result of invalid
3061 pointer casts, e.g. float f; ...
3062 *(long long int *)&f.
3063 ??? We could emit a warning here, but
3064 without a line number that wouldn't be
3065 very helpful. */
3066 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3068 else
3069 z = gen_lowpart (GET_MODE (x), z);
3071 *loc = z;
3072 return true;
3075 /* Sometimes we may not be able to find the replacement. For
3076 example when the original insn was a MEM in a wider mode,
3077 and the note is part of a sign extension of a narrowed
3078 version of that MEM. Gcc testcase compile/990829-1.c can
3079 generate an example of this situation. Rather than complain,
3080 we return false, which will prompt our caller to remove the
3081 offending note. */
3082 return false;
3085 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3086 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3088 /* Don't even consider working with paradoxical subregs,
3089 or the moral equivalent seen here. */
3090 if (size_x <= size_sub
3091 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3093 /* Do a bitfield insertion to mirror what would happen
3094 in memory. */
3096 rtx val, seq;
3098 if (store)
3100 rtx p = PREV_INSN (insn);
3102 start_sequence ();
3103 val = gen_reg_rtx (GET_MODE (x));
3104 if (! validate_change (insn, loc, val, 0))
3106 /* Discard the current sequence and put the
3107 ADDRESSOF on stack. */
3108 end_sequence ();
3109 goto give_up;
3111 seq = gen_sequence ();
3112 end_sequence ();
3113 emit_insn_before (seq, insn);
3114 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3115 insn, ht);
3117 start_sequence ();
3118 store_bit_field (sub, size_x, 0, GET_MODE (x),
3119 val, GET_MODE_SIZE (GET_MODE (sub)));
3121 /* Make sure to unshare any shared rtl that store_bit_field
3122 might have created. */
3123 unshare_all_rtl_again (get_insns ());
3125 seq = gen_sequence ();
3126 end_sequence ();
3127 p = emit_insn_after (seq, insn);
3128 if (NEXT_INSN (insn))
3129 compute_insns_for_mem (NEXT_INSN (insn),
3130 p ? NEXT_INSN (p) : NULL_RTX,
3131 ht);
3133 else
3135 rtx p = PREV_INSN (insn);
3137 start_sequence ();
3138 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3139 GET_MODE (x), GET_MODE (x),
3140 GET_MODE_SIZE (GET_MODE (sub)));
3142 if (! validate_change (insn, loc, val, 0))
3144 /* Discard the current sequence and put the
3145 ADDRESSOF on stack. */
3146 end_sequence ();
3147 goto give_up;
3150 seq = gen_sequence ();
3151 end_sequence ();
3152 emit_insn_before (seq, insn);
3153 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3154 insn, ht);
3157 /* Remember the replacement so that the same one can be done
3158 on the REG_NOTES. */
3159 purge_bitfield_addressof_replacements
3160 = gen_rtx_EXPR_LIST (VOIDmode, x,
3161 gen_rtx_EXPR_LIST
3162 (VOIDmode, val,
3163 purge_bitfield_addressof_replacements));
3165 /* We replaced with a reg -- all done. */
3166 return true;
3170 else if (validate_change (insn, loc, sub, 0))
3172 /* Remember the replacement so that the same one can be done
3173 on the REG_NOTES. */
3174 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3176 rtx tem;
3178 for (tem = purge_addressof_replacements;
3179 tem != NULL_RTX;
3180 tem = XEXP (XEXP (tem, 1), 1))
3181 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3183 XEXP (XEXP (tem, 1), 0) = sub;
3184 return true;
3186 purge_addressof_replacements
3187 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3188 gen_rtx_EXPR_LIST (VOIDmode, sub,
3189 purge_addressof_replacements));
3190 return true;
3192 goto restart;
3196 give_up:
3197 /* Scan all subexpressions. */
3198 fmt = GET_RTX_FORMAT (code);
3199 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3201 if (*fmt == 'e')
3202 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3203 else if (*fmt == 'E')
3204 for (j = 0; j < XVECLEN (x, i); j++)
3205 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3208 return result;
3211 /* Return a new hash table entry in HT. */
3213 static struct hash_entry *
3214 insns_for_mem_newfunc (he, ht, k)
3215 struct hash_entry *he;
3216 struct hash_table *ht;
3217 hash_table_key k ATTRIBUTE_UNUSED;
3219 struct insns_for_mem_entry *ifmhe;
3220 if (he)
3221 return he;
3223 ifmhe = ((struct insns_for_mem_entry *)
3224 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3225 ifmhe->insns = NULL_RTX;
3227 return &ifmhe->he;
3230 /* Return a hash value for K, a REG. */
3232 static unsigned long
3233 insns_for_mem_hash (k)
3234 hash_table_key k;
3236 /* K is really a RTX. Just use the address as the hash value. */
3237 return (unsigned long) k;
3240 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3242 static bool
3243 insns_for_mem_comp (k1, k2)
3244 hash_table_key k1;
3245 hash_table_key k2;
3247 return k1 == k2;
3250 struct insns_for_mem_walk_info
3252 /* The hash table that we are using to record which INSNs use which
3253 MEMs. */
3254 struct hash_table *ht;
3256 /* The INSN we are currently processing. */
3257 rtx insn;
3259 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3260 to find the insns that use the REGs in the ADDRESSOFs. */
3261 int pass;
3264 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3265 that might be used in an ADDRESSOF expression, record this INSN in
3266 the hash table given by DATA (which is really a pointer to an
3267 insns_for_mem_walk_info structure). */
3269 static int
3270 insns_for_mem_walk (r, data)
3271 rtx *r;
3272 void *data;
3274 struct insns_for_mem_walk_info *ifmwi
3275 = (struct insns_for_mem_walk_info *) data;
3277 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3278 && GET_CODE (XEXP (*r, 0)) == REG)
3279 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3280 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3282 /* Look up this REG in the hash table; an entry exists only if pass 0 saw the REG inside an ADDRESSOF. */
3283 struct insns_for_mem_entry *ifme
3284 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3285 *r,
3286 /*create=*/0,
3287 /*copy=*/0);
3289 /* If we have not already recorded this INSN, do so now. Since
3290 we process the INSNs in order, we know that if we have
3291 recorded it, it must be at the front of the list. */
3292 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3293 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3294 ifme->insns);
3297 return 0;
3300 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3301 which REGs in HT. */
3303 static void
3304 compute_insns_for_mem (insns, last_insn, ht)
3305 rtx insns;
3306 rtx last_insn;
3307 struct hash_table *ht;
3309 rtx insn;
3310 struct insns_for_mem_walk_info ifmwi;
3311 ifmwi.ht = ht;
3313 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3314 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3315 if (INSN_P (insn))
3317 ifmwi.insn = insn;
3318 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3322 /* Helper function for purge_addressof called through for_each_rtx.
3323 Returns true iff the rtl is an ADDRESSOF. */
3325 static int
3326 is_addressof (rtl, data)
3327 rtx *rtl;
3328 void *data ATTRIBUTE_UNUSED;
3330 return GET_CODE (*rtl) == ADDRESSOF;
3333 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3334 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3335 stack. */
3337 void
3338 purge_addressof (insns)
3339 rtx insns;
3341 rtx insn;
3342 struct hash_table ht;
3344 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3345 requires a fixup pass over the instruction stream to correct
3346 INSNs that depended on the REG being a REG, and not a MEM. But,
3347 these fixup passes are slow. Furthermore, most MEMs are not
3348 mentioned in very many instructions. So, we speed up the process
3349 by pre-calculating which REGs occur in which INSNs; that allows
3350 us to perform the fixup passes much more quickly. */
3351 hash_table_init (&ht,
3352 insns_for_mem_newfunc,
3353 insns_for_mem_hash,
3354 insns_for_mem_comp);
3355 compute_insns_for_mem (insns, NULL_RTX, &ht);
3357 for (insn = insns; insn; insn = NEXT_INSN (insn))
3358 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3359 || GET_CODE (insn) == CALL_INSN)
3361 if (! purge_addressof_1 (&PATTERN (insn), insn,
3362 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3363 /* If we could not replace the ADDRESSOFs in the insn,
3364 something is wrong. */
3365 abort ();
3367 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3369 /* If we could not replace the ADDRESSOFs in the insn's notes,
3370 we can just remove the offending notes instead. */
3371 rtx note;
3373 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3375 /* If we find a REG_RETVAL note then the insn is a libcall.
3376 Such insns must have REG_EQUAL notes as well, in order
3377 for later passes of the compiler to work. So it is not
3378 safe to delete the notes here, and instead we abort. */
3379 if (REG_NOTE_KIND (note) == REG_RETVAL)
3380 abort ();
3381 if (for_each_rtx (&note, is_addressof, NULL))
3382 remove_note (insn, note);
3387 /* Clean up. */
3388 hash_table_free (&ht);
3389 purge_bitfield_addressof_replacements = 0;
3390 purge_addressof_replacements = 0;
3392 /* REGs are shared. purge_addressof will destructively replace a REG
3393 with a MEM, which creates shared MEMs.
3395 Unfortunately, the children of put_reg_into_stack assume that MEMs
3396 referring to the same stack slot are shared (fixup_var_refs and
3397 the associated hash table code).
3399 So, we have to do another unsharing pass after we have flushed any
3400 REGs that had their address taken into the stack.
3402 It may be worth tracking whether or not we converted any REGs into
3403 MEMs to avoid this overhead when it is not needed. */
3404 unshare_all_rtl_again (get_insns ());
3407 /* Convert a SET of a hard subreg to a set of the appropriate hard
3408 register. A subroutine of purge_hard_subreg_sets. */
3410 static void
3411 purge_single_hard_subreg_set (pattern)
3412 rtx pattern;
3414 rtx reg = SET_DEST (pattern);
3415 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3416 int offset = 0;
3418 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3419 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3421 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3422 GET_MODE (SUBREG_REG (reg)),
3423 SUBREG_BYTE (reg),
3424 GET_MODE (reg));
3425 reg = SUBREG_REG (reg);
3429 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3431 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3432 SET_DEST (pattern) = reg;
3436 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3437 only such SETs that we expect to see are those left in because
3438 integrate can't handle sets of parts of a return value register.
3440 We don't use alter_subreg because we only want to eliminate subregs
3441 of hard registers. */
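/* Sketch, assuming a target with 4-byte words where DImode occupies
   hard registers 0 and 1:

     (set (subreg:SI (reg:DI 0) 4) (reg:SI 100))

   becomes

     (set (reg:SI 1) (reg:SI 100)).  */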
3443 void
3444 purge_hard_subreg_sets (insn)
3445 rtx insn;
3447 for (; insn; insn = NEXT_INSN (insn))
3449 if (INSN_P (insn))
3451 rtx pattern = PATTERN (insn);
3452 switch (GET_CODE (pattern))
3454 case SET:
3455 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3456 purge_single_hard_subreg_set (pattern);
3457 break;
3458 case PARALLEL:
3460 int j;
3461 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3463 rtx inner_pattern = XVECEXP (pattern, 0, j);
3464 if (GET_CODE (inner_pattern) == SET
3465 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3466 purge_single_hard_subreg_set (inner_pattern);
3469 break;
3470 default:
3471 break;
3477 /* Pass through the INSNS of function FNDECL and convert virtual register
3478 references to hard register references. */
3480 void
3481 instantiate_virtual_regs (fndecl, insns)
3482 tree fndecl;
3483 rtx insns;
3485 rtx insn;
3486 unsigned int i;
3488 /* Compute the offsets to use for this function. */
3489 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3490 var_offset = STARTING_FRAME_OFFSET;
3491 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3492 out_arg_offset = STACK_POINTER_OFFSET;
3493 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3495 /* Scan all variables and parameters of this function. For each that is
3496 in memory, instantiate all virtual registers if the result is a valid
3497 address. If not, we do it later. That will handle most uses of virtual
3498 regs on many machines. */
3499 instantiate_decls (fndecl, 1);
3501 /* Initialize recognition, indicating that volatile is OK. */
3502 init_recog ();
3504 /* Scan through all the insns, instantiating every virtual register still
3505 present. */
3506 for (insn = insns; insn; insn = NEXT_INSN (insn))
3507 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3508 || GET_CODE (insn) == CALL_INSN)
3510 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3511 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3512 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3513 if (GET_CODE (insn) == CALL_INSN)
3514 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3515 NULL_RTX, 0);
3518 /* Instantiate the stack slots for the parm registers, for later use in
3519 addressof elimination. */
3520 for (i = 0; i < max_parm_reg; ++i)
3521 if (parm_reg_stack_loc[i])
3522 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3524 /* Now instantiate the remaining register equivalences for debugging info.
3525 These will not be valid addresses. */
3526 instantiate_decls (fndecl, 0);
3528 /* Indicate that, from now on, assign_stack_local should use
3529 frame_pointer_rtx. */
3530 virtuals_instantiated = 1;
3533 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3534 all virtual registers in their DECL_RTL's.
3536 If VALID_ONLY, do this only if the resulting address is still valid.
3537 Otherwise, always do it. */
3539 static void
3540 instantiate_decls (fndecl, valid_only)
3541 tree fndecl;
3542 int valid_only;
3544 tree decl;
3546 /* Process all parameters of the function. */
3547 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3549 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3550 HOST_WIDE_INT size_rtl;
3552 instantiate_decl (DECL_RTL (decl), size, valid_only);
3554 /* If the parameter was promoted, then the incoming RTL mode may be
3555 larger than the declared type size. We must use the larger of
3556 the two sizes. */
3557 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3558 size = MAX (size_rtl, size);
3559 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3562 /* Now process all variables defined in the function or its subblocks. */
3563 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3566 /* Subroutine of instantiate_decls: Process all decls in the given
3567 BLOCK node and all its subblocks. */
3569 static void
3570 instantiate_decls_1 (let, valid_only)
3571 tree let;
3572 int valid_only;
3574 tree t;
3576 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3577 if (DECL_RTL_SET_P (t))
3578 instantiate_decl (DECL_RTL (t),
3579 int_size_in_bytes (TREE_TYPE (t)),
3580 valid_only);
3582 /* Process all subblocks. */
3583 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3584 instantiate_decls_1 (t, valid_only);
3587 /* Subroutine of the preceding procedures: Given RTL representing a
3588 decl and the size of the object, do any instantiation required.
3590 If VALID_ONLY is non-zero, it means that the RTL should only be
3591 changed if the new address is valid. */
3593 static void
3594 instantiate_decl (x, size, valid_only)
3595 rtx x;
3596 HOST_WIDE_INT size;
3597 int valid_only;
3599 enum machine_mode mode;
3600 rtx addr;
3602 /* If this is not a MEM, no need to do anything. Similarly if the
3603 address is a constant or a register that is not a virtual register. */
3605 if (x == 0 || GET_CODE (x) != MEM)
3606 return;
3608 addr = XEXP (x, 0);
3609 if (CONSTANT_P (addr)
3610 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3611 || (GET_CODE (addr) == REG
3612 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3613 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3614 return;
3616 /* If we should only do this if the address is valid, copy the address.
3617 We need to do this so we can undo any changes that might make the
3618 address invalid. This copy is unfortunate, but probably can't be
3619 avoided. */
3621 if (valid_only)
3622 addr = copy_rtx (addr);
3624 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3626 if (valid_only && size >= 0)
3628 unsigned HOST_WIDE_INT decl_size = size;
3630 /* Now verify that the resulting address is valid for every integer or
3631 floating-point mode up to and including SIZE bytes long. We do this
3632 since the object might be accessed in any mode and frame addresses
3633 are shared. */
3635 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3636 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3637 mode = GET_MODE_WIDER_MODE (mode))
3638 if (! memory_address_p (mode, addr))
3639 return;
3641 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3642 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3643 mode = GET_MODE_WIDER_MODE (mode))
3644 if (! memory_address_p (mode, addr))
3645 return;
3648 /* Put back the address now that we have updated it and we either know
3649 it is valid or we don't care whether it is valid. */
3651 XEXP (x, 0) = addr;
3654 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3655 is a virtual register, return the equivalent hard register and set the
3656 offset indirectly through the pointer. Otherwise, return 0. */
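/* For instance, given (reg virtual-stack-vars) this returns
   frame_pointer_rtx and stores var_offset through POFFSET; the
   caller then folds the offset into the surrounding address, e.g.
   (plus (reg virtual-stack-vars) (const_int 8)) ultimately becomes
   (plus (reg fp) (const_int var_offset + 8)).  */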
3658 static rtx
3659 instantiate_new_reg (x, poffset)
3660 rtx x;
3661 HOST_WIDE_INT *poffset;
3663 rtx new;
3664 HOST_WIDE_INT offset;
3666 if (x == virtual_incoming_args_rtx)
3667 new = arg_pointer_rtx, offset = in_arg_offset;
3668 else if (x == virtual_stack_vars_rtx)
3669 new = frame_pointer_rtx, offset = var_offset;
3670 else if (x == virtual_stack_dynamic_rtx)
3671 new = stack_pointer_rtx, offset = dynamic_offset;
3672 else if (x == virtual_outgoing_args_rtx)
3673 new = stack_pointer_rtx, offset = out_arg_offset;
3674 else if (x == virtual_cfa_rtx)
3675 new = arg_pointer_rtx, offset = cfa_offset;
3676 else
3677 return 0;
3679 *poffset = offset;
3680 return new;
3683 /* Given a pointer to a piece of rtx and an optional pointer to the
3684 containing object, instantiate any virtual registers present in it.
3686 If EXTRA_INSNS, we always do the replacement and generate
3687 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3688 is not valid.
3690 Return 1 if we either had nothing to do or if we were able to do the
3691 needed replacement. Return 0 otherwise; we only return zero if
3692 EXTRA_INSNS is zero.
3694 We first try some simple transformations to avoid the creation of extra
3695 pseudos. */
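/* For example, (plus (reg virtual-stack-vars) (const_int 4)) can usually
   be rewritten in place as (plus (reg fp) (const_int 4 + var_offset));
   only when the combined address is invalid (and EXTRA_INSNS is set) do
   we compute the sum into a new pseudo before OBJECT.  */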
3697 static int
3698 instantiate_virtual_regs_1 (loc, object, extra_insns)
3699 rtx *loc;
3700 rtx object;
3701 int extra_insns;
3703 rtx x;
3704 RTX_CODE code;
3705 rtx new = 0;
3706 HOST_WIDE_INT offset = 0;
3707 rtx temp;
3708 rtx seq;
3709 int i, j;
3710 const char *fmt;
3712 /* Re-start here to avoid recursion in common cases. */
3713 restart:
3715 x = *loc;
3716 if (x == 0)
3717 return 1;
3719 code = GET_CODE (x);
3721 /* Check for some special cases. */
3722 switch (code)
3724 case CONST_INT:
3725 case CONST_DOUBLE:
3726 case CONST_VECTOR:
3727 case CONST:
3728 case SYMBOL_REF:
3729 case CODE_LABEL:
3730 case PC:
3731 case CC0:
3732 case ASM_INPUT:
3733 case ADDR_VEC:
3734 case ADDR_DIFF_VEC:
3735 case RETURN:
3736 return 1;
3738 case SET:
3739 /* We are allowed to set the virtual registers. This means that
3740 the actual register should receive the source minus the
3741 appropriate offset. This is used, for example, in the handling
3742 of non-local gotos. */
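/* For instance, if uses of virtual_stack_vars_rtx are being replaced
   by fp + var_offset, then setting virtual_stack_vars_rtx to some
   value V must store V - var_offset into fp, so that a subsequent
   use reads back V.  */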
3743 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3745 rtx src = SET_SRC (x);
3747 /* We are setting the register, not using it, so the relevant
3748 offset is the negative of the offset to use were we using
3749 the register. */
3750 offset = - offset;
3751 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3753 /* The only valid sources here are PLUS or REG. Just do
3754 the simplest possible thing to handle them. */
3755 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3756 abort ();
3758 start_sequence ();
3759 if (GET_CODE (src) != REG)
3760 temp = force_operand (src, NULL_RTX);
3761 else
3762 temp = src;
3763 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3764 seq = get_insns ();
3765 end_sequence ();
3767 emit_insns_before (seq, object);
3768 SET_DEST (x) = new;
3770 if (! validate_change (object, &SET_SRC (x), temp, 0)
3771 || ! extra_insns)
3772 abort ();
3774 return 1;
3777 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3778 loc = &SET_SRC (x);
3779 goto restart;
3781 case PLUS:
3782 /* Handle special case of virtual register plus constant. */
3783 if (CONSTANT_P (XEXP (x, 1)))
3785 rtx old, new_offset;
3787 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3788 if (GET_CODE (XEXP (x, 0)) == PLUS)
3790 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3792 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3793 extra_insns);
3794 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3796 else
3798 loc = &XEXP (x, 0);
3799 goto restart;
3803 #ifdef POINTERS_EXTEND_UNSIGNED
3804 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3805 we can commute the PLUS and SUBREG because pointers into the
3806 frame are well-behaved. */
3807 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3808 && GET_CODE (XEXP (x, 1)) == CONST_INT
3809 && 0 != (new
3810 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3811 &offset))
3812 && validate_change (object, loc,
3813 plus_constant (gen_lowpart (ptr_mode,
3814 new),
3815 offset
3816 + INTVAL (XEXP (x, 1))),
3818 return 1;
3819 #endif
3820 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3822 /* We know the second operand is a constant. Unless the
3823 first operand is a REG (which has already been checked),
3824 it needs to be checked. */
3825 if (GET_CODE (XEXP (x, 0)) != REG)
3827 loc = &XEXP (x, 0);
3828 goto restart;
3830 return 1;
3833 new_offset = plus_constant (XEXP (x, 1), offset);
3835 /* If the new constant is zero, try to replace the sum with just
3836 the register. */
3837 if (new_offset == const0_rtx
3838 && validate_change (object, loc, new, 0))
3839 return 1;
3841 /* Next try to replace the register and new offset.
3842 There are two changes to validate here and we can't assume that
3843 when the old offset equals the new one, just changing the register
3844 will yield a valid insn. In the interests of a little efficiency,
3845 however, we only call validate_change once (we don't queue up the
3846 changes and then call apply_change_group). */
3848 old = XEXP (x, 0);
3849 if (offset == 0
3850 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3851 : (XEXP (x, 0) = new,
3852 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3854 if (! extra_insns)
3856 XEXP (x, 0) = old;
3857 return 0;
3860 /* Otherwise copy the new constant into a register and replace
3861 the constant with that register. */
3862 temp = gen_reg_rtx (Pmode);
3863 XEXP (x, 0) = new;
3864 if (validate_change (object, &XEXP (x, 1), temp, 0))
3865 emit_insn_before (gen_move_insn (temp, new_offset), object);
3866 else
3868 /* If that didn't work, replace this expression with a
3869 register containing the sum. */
3871 XEXP (x, 0) = old;
3872 new = gen_rtx_PLUS (Pmode, new, new_offset);
3874 start_sequence ();
3875 temp = force_operand (new, NULL_RTX);
3876 seq = get_insns ();
3877 end_sequence ();
3879 emit_insns_before (seq, object);
3880 if (! validate_change (object, loc, temp, 0)
3881 && ! validate_replace_rtx (x, temp, object))
3882 abort ();
3886 return 1;
3889 /* Fall through to generic two-operand expression case. */
3890 case EXPR_LIST:
3891 case CALL:
3892 case COMPARE:
3893 case MINUS:
3894 case MULT:
3895 case DIV: case UDIV:
3896 case MOD: case UMOD:
3897 case AND: case IOR: case XOR:
3898 case ROTATERT: case ROTATE:
3899 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3900 case NE: case EQ:
3901 case GE: case GT: case GEU: case GTU:
3902 case LE: case LT: case LEU: case LTU:
3903 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3904 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3905 loc = &XEXP (x, 0);
3906 goto restart;
3908 case MEM:
3909 /* Most cases of MEM that convert to valid addresses have already been
3910 handled by our scan of decls. The only special handling we
3911 need here is to make a copy of the rtx to ensure it isn't being
3912 shared if we have to change it to a pseudo.
3914 If the rtx is a simple reference to an address via a virtual register,
3915 it can potentially be shared. In such cases, first try to make it
3916 a valid address, which can also be shared. Otherwise, copy it and
3917 proceed normally.
3919 First check for common cases that need no processing. These are
3920 usually due to instantiation already being done on a previous instance
3921 of a shared rtx. */
3923 temp = XEXP (x, 0);
3924 if (CONSTANT_ADDRESS_P (temp)
3925 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3926 || temp == arg_pointer_rtx
3927 #endif
3928 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3929 || temp == hard_frame_pointer_rtx
3930 #endif
3931 || temp == frame_pointer_rtx)
3932 return 1;
3934 if (GET_CODE (temp) == PLUS
3935 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3936 && (XEXP (temp, 0) == frame_pointer_rtx
3937 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3938 || XEXP (temp, 0) == hard_frame_pointer_rtx
3939 #endif
3940 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3941 || XEXP (temp, 0) == arg_pointer_rtx
3942 #endif
3944 return 1;
3946 if (temp == virtual_stack_vars_rtx
3947 || temp == virtual_incoming_args_rtx
3948 || (GET_CODE (temp) == PLUS
3949 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3950 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3951 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3953 /* This MEM may be shared. If the substitution can be done without
3954 the need to generate new pseudos, we want to do it in place
3955 so all copies of the shared rtx benefit. The call below will
3956 only make substitutions if the resulting address is still
3957 valid.
3959 Note that we cannot pass X as the object in the recursive call
3960 since the insn being processed may not allow all valid
3961 addresses. However, if we were not passed an object, we can
3962 only modify X without copying it if X will have a valid
3963 address.
3965 ??? Also note that this can still lose if OBJECT is an insn that
3966 has fewer restrictions on an address than some other insn.
3967 In that case, we will modify the shared address. This case
3968 doesn't seem very likely, though. One case where this could
3969 happen is in the case of a USE or CLOBBER reference, but we
3970 take care of that below. */
3972 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3973 object ? object : x, 0))
3974 return 1;
3976 /* Otherwise make a copy and process that copy. We copy the entire
3977 RTL expression since it might be a PLUS which could also be
3978 shared. */
3979 *loc = x = copy_rtx (x);
3982 /* Fall through to generic unary operation case. */
3983 case PREFETCH:
3984 case SUBREG:
3985 case STRICT_LOW_PART:
3986 case NEG: case NOT:
3987 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3988 case SIGN_EXTEND: case ZERO_EXTEND:
3989 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3990 case FLOAT: case FIX:
3991 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3992 case ABS:
3993 case SQRT:
3994 case FFS:
3995 /* These cases either have just one operand or we know that we need not
3996 check the rest of the operands. */
3997 loc = &XEXP (x, 0);
3998 goto restart;
4000 case USE:
4001 case CLOBBER:
4002 /* If the operand is a MEM, see if the change yields a valid MEM. If not,
4003 go ahead and make the change anyway, but apply it to a copy. For a REG,
4004 just make the recursive call, since there's no chance of a problem. */
4006 if ((GET_CODE (XEXP (x, 0)) == MEM
4007 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4009 || (GET_CODE (XEXP (x, 0)) == REG
4010 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4011 return 1;
4013 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4014 loc = &XEXP (x, 0);
4015 goto restart;
4017 case REG:
4018 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4019 in front of this insn and substitute the temporary. */
4020 if ((new = instantiate_new_reg (x, &offset)) != 0)
4022 temp = plus_constant (new, offset);
4023 if (!validate_change (object, loc, temp, 0))
4025 if (! extra_insns)
4026 return 0;
4028 start_sequence ();
4029 temp = force_operand (temp, NULL_RTX);
4030 seq = get_insns ();
4031 end_sequence ();
4033 emit_insns_before (seq, object);
4034 if (! validate_change (object, loc, temp, 0)
4035 && ! validate_replace_rtx (x, temp, object))
4036 abort ();
4040 return 1;
4042 case ADDRESSOF:
4043 if (GET_CODE (XEXP (x, 0)) == REG)
4044 return 1;
4046 else if (GET_CODE (XEXP (x, 0)) == MEM)
4048 /* If we have a (addressof (mem ..)), do any instantiation inside
4049 since we know we'll be making the inside valid when we finally
4050 remove the ADDRESSOF. */
4051 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4052 return 1;
4054 break;
4056 default:
4057 break;
4060 /* Scan all subexpressions. */
4061 fmt = GET_RTX_FORMAT (code);
4062 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4063 if (*fmt == 'e')
4065 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4066 return 0;
4068 else if (*fmt == 'E')
4069 for (j = 0; j < XVECLEN (x, i); j++)
4070 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4071 extra_insns))
4072 return 0;
4074 return 1;
4077 /* Optimization: assuming this function does not receive nonlocal gotos,
4078 delete the handlers for them, as well as the insns that establish
4079 and disestablish them. */
4081 static void
4082 delete_handlers ()
4084 rtx insn;
4085 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4087 /* Delete the handler by turning off the flag that would
4088 prevent jump_optimize from deleting it.
4089 Also permit deletion of the nonlocal labels themselves
4090 if nothing local refers to them. */
4091 if (GET_CODE (insn) == CODE_LABEL)
4093 tree t, last_t;
4095 LABEL_PRESERVE_P (insn) = 0;
4097 /* Remove it from the nonlocal_label list, to avoid confusing
4098 flow. */
4099 for (t = nonlocal_labels, last_t = 0; t;
4100 last_t = t, t = TREE_CHAIN (t))
4101 if (DECL_RTL (TREE_VALUE (t)) == insn)
4102 break;
4103 if (t)
4105 if (! last_t)
4106 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4107 else
4108 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4111 if (GET_CODE (insn) == INSN)
4113 int can_delete = 0;
4114 rtx t;
4115 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4116 if (reg_mentioned_p (t, PATTERN (insn)))
4118 can_delete = 1;
4119 break;
4121 if (can_delete
4122 || (nonlocal_goto_stack_level != 0
4123 && reg_mentioned_p (nonlocal_goto_stack_level,
4124 PATTERN (insn))))
4125 delete_related_insns (insn);
4131 max_parm_reg_num ()
4133 return max_parm_reg;
4136 /* Return the first insn following those generated by `assign_parms'. */
4139 get_first_nonparm_insn ()
4141 if (last_parm_insn)
4142 return NEXT_INSN (last_parm_insn);
4143 return get_insns ();
4146 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4147 Crash if there is none. */
4150 get_first_block_beg ()
4152 rtx searcher;
4153 rtx insn = get_first_nonparm_insn ();
4155 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4156 if (GET_CODE (searcher) == NOTE
4157 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4158 return searcher;
4160 abort (); /* Invalid call to this function. (See comments above.) */
4161 return NULL_RTX;
4164 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4165 This means a type for which function calls must pass an address to the
4166 function or get an address back from the function.
4167 EXP may be a type node or an expression (whose type is tested). */
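/* For instance, any aggregate is returned in memory when
   -fpcc-struct-return is in effect, and a TREE_ADDRESSABLE type must
   be constructed in memory regardless of its size.  */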
4170 aggregate_value_p (exp)
4171 tree exp;
4173 int i, regno, nregs;
4174 rtx reg;
4176 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4178 if (TREE_CODE (type) == VOID_TYPE)
4179 return 0;
4180 if (RETURN_IN_MEMORY (type))
4181 return 1;
4182 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4183 and thus can't be returned in registers. */
4184 if (TREE_ADDRESSABLE (type))
4185 return 1;
4186 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4187 return 1;
4188 /* Make sure we have suitable call-clobbered regs to return
4189 the value in; if not, we must return it in memory. */
4190 reg = hard_function_value (type, 0, 0);
4192 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4193 it is OK. */
4194 if (GET_CODE (reg) != REG)
4195 return 0;
4197 regno = REGNO (reg);
4198 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4199 for (i = 0; i < nregs; i++)
4200 if (! call_used_regs[regno + i])
4201 return 1;
4202 return 0;
4205 /* Assign RTL expressions to the function's parameters.
4206 This may involve copying them into registers and using
4207 those registers as the RTL for them. */
4209 void
4210 assign_parms (fndecl)
4211 tree fndecl;
4213 tree parm;
4214 rtx entry_parm = 0;
4215 rtx stack_parm = 0;
4216 CUMULATIVE_ARGS args_so_far;
4217 enum machine_mode promoted_mode, passed_mode;
4218 enum machine_mode nominal_mode, promoted_nominal_mode;
4219 int unsignedp;
4220 /* Total space needed so far for args on the stack,
4221 given as a constant and a tree-expression. */
4222 struct args_size stack_args_size;
4223 tree fntype = TREE_TYPE (fndecl);
4224 tree fnargs = DECL_ARGUMENTS (fndecl);
4225 /* This is used for the arg pointer when referring to stack args. */
4226 rtx internal_arg_pointer;
4227 /* This is a dummy PARM_DECL that we used for the function result if
4228 the function returns a structure. */
4229 tree function_result_decl = 0;
4230 #ifdef SETUP_INCOMING_VARARGS
4231 int varargs_setup = 0;
4232 #endif
4233 rtx conversion_insns = 0;
4234 struct args_size alignment_pad;
4236 /* Nonzero if the last arg is named `__builtin_va_alist',
4237 which is used on some machines for old-fashioned non-ANSI varargs.h;
4238 this should be stuck onto the stack as if it had arrived there. */
4239 int hide_last_arg
4240 = (current_function_varargs
4241 && fnargs
4242 && (parm = tree_last (fnargs)) != 0
4243 && DECL_NAME (parm)
4244 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4245 "__builtin_va_alist")));
4247 /* Nonzero if function takes extra anonymous args.
4248 This means the last named arg must be on the stack
4249 right before the anonymous ones. */
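/* For example, `int f (int a, ...)' has a TYPE_ARG_TYPES list whose
   last element is the type of A rather than void_type_node, so STDARG
   is set; for `int f (int a)' the list ends in void_type_node.  */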
4250 int stdarg
4251 = (TYPE_ARG_TYPES (fntype) != 0
4252 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4253 != void_type_node));
4255 current_function_stdarg = stdarg;
4257 /* If the reg that the virtual arg pointer will be translated into is
4258 not a fixed reg or is the stack pointer, make a copy of the virtual
4259 arg pointer, and address parms via the copy. The frame pointer is
4260 considered fixed even though it is not marked as such.
4262 The second time through, simply use ap to avoid generating rtx. */
4264 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4265 || ! (fixed_regs[ARG_POINTER_REGNUM]
4266 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4267 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4268 else
4269 internal_arg_pointer = virtual_incoming_args_rtx;
4270 current_function_internal_arg_pointer = internal_arg_pointer;
4272 stack_args_size.constant = 0;
4273 stack_args_size.var = 0;
4275 /* If struct value address is treated as the first argument, make it so. */
4276 if (aggregate_value_p (DECL_RESULT (fndecl))
4277 && ! current_function_returns_pcc_struct
4278 && struct_value_incoming_rtx == 0)
4280 tree type = build_pointer_type (TREE_TYPE (fntype));
4282 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4284 DECL_ARG_TYPE (function_result_decl) = type;
4285 TREE_CHAIN (function_result_decl) = fnargs;
4286 fnargs = function_result_decl;
4289 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4290 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4292 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4293 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4294 #else
4295 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4296 #endif
4298 /* We haven't yet found an argument that we must push and pretend the
4299 caller did. */
4300 current_function_pretend_args_size = 0;
4302 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4304 struct args_size stack_offset;
4305 struct args_size arg_size;
4306 int passed_pointer = 0;
4307 int did_conversion = 0;
4308 tree passed_type = DECL_ARG_TYPE (parm);
4309 tree nominal_type = TREE_TYPE (parm);
4310 int pretend_named;
4311 int last_named = 0, named_arg;
4313 /* Set LAST_NAMED if this is last named arg before last
4314 anonymous args. */
4315 if (stdarg || current_function_varargs)
4317 tree tem;
4319 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4320 if (DECL_NAME (tem))
4321 break;
4323 if (tem == 0)
4324 last_named = 1;
4326 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4327 most machines, if this is a varargs/stdarg function, then we treat
4328 the last named arg as if it were anonymous too. */
4329 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4331 if (TREE_TYPE (parm) == error_mark_node
4332 /* This can happen after weird syntax errors
4333 or if an enum type is defined among the parms. */
4334 || TREE_CODE (parm) != PARM_DECL
4335 || passed_type == NULL)
4337 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4338 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4339 TREE_USED (parm) = 1;
4340 continue;
4343 /* For a varargs.h function, save info about regs and stack space
4344 used by the individual args, not including the va_alist arg. */
4345 if (hide_last_arg && last_named)
4346 current_function_args_info = args_so_far;
4348 /* Find mode of arg as it is passed, and mode of arg
4349 as it should be during execution of this function. */
4350 passed_mode = TYPE_MODE (passed_type);
4351 nominal_mode = TYPE_MODE (nominal_type);
4353 /* If the parm's mode is VOID, its value doesn't matter,
4354 so avoid the usual things like emit_move_insn that could crash. */
4355 if (nominal_mode == VOIDmode)
4357 SET_DECL_RTL (parm, const0_rtx);
4358 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4359 continue;
4362 /* If the parm is to be passed as a transparent union, use the
4363 type of the first field for the tests below. We have already
4364 verified that the modes are the same. */
4365 if (DECL_TRANSPARENT_UNION (parm)
4366 || (TREE_CODE (passed_type) == UNION_TYPE
4367 && TYPE_TRANSPARENT_UNION (passed_type)))
4368 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4370 /* See if this arg was passed by invisible reference. It is if
4371 it is an object whose size depends on the contents of the
4372 object itself or if the machine requires these objects be passed
4373 that way. */
4375 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4376 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4377 || TREE_ADDRESSABLE (passed_type)
4378 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4379 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4380 passed_type, named_arg)
4381 #endif
4384 passed_type = nominal_type = build_pointer_type (passed_type);
4385 passed_pointer = 1;
4386 passed_mode = nominal_mode = Pmode;
4389 promoted_mode = passed_mode;
4391 #ifdef PROMOTE_FUNCTION_ARGS
4392 /* Compute the mode to which the arg is actually extended. */
4393 unsignedp = TREE_UNSIGNED (passed_type);
4394 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4395 #endif
4397 /* Let machine desc say which reg (if any) the parm arrives in.
4398 0 means it arrives on the stack. */
4399 #ifdef FUNCTION_INCOMING_ARG
4400 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4401 passed_type, named_arg);
4402 #else
4403 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4404 passed_type, named_arg);
4405 #endif
4407 if (entry_parm == 0)
4408 promoted_mode = passed_mode;
4410 #ifdef SETUP_INCOMING_VARARGS
4411 /* If this is the last named parameter, do any required setup for
4412 varargs or stdargs. We need to know about the case of this being an
4413 addressable type, in which case we skip the registers it
4414 would have arrived in.
4416 For stdargs, LAST_NAMED will be set for two parameters, the one that
4417 is actually the last named, and the dummy parameter. We only
4418 want to do this action once.
4420 Also, indicate when RTL generation is to be suppressed. */
4421 if (last_named && !varargs_setup)
4423 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4424 current_function_pretend_args_size, 0);
4425 varargs_setup = 1;
4427 #endif
4429 /* Determine parm's home in the stack,
4430 in case it arrives in the stack or we should pretend it did.
4432 Compute the stack position and rtx where the argument arrives
4433 and its size.
4435 There is one complexity here: If this was a parameter that would
4436 have been passed in registers, but wasn't, only because it is
4437 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4438 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4439 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4440 0 as it was the previous time. */
4442 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4443 locate_and_pad_parm (promoted_mode, passed_type,
4444 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4446 #else
4447 #ifdef FUNCTION_INCOMING_ARG
4448 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4449 passed_type,
4450 pretend_named) != 0,
4451 #else
4452 FUNCTION_ARG (args_so_far, promoted_mode,
4453 passed_type,
4454 pretend_named) != 0,
4455 #endif
4456 #endif
4457 fndecl, &stack_args_size, &stack_offset, &arg_size,
4458 &alignment_pad);
4461 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4463 if (offset_rtx == const0_rtx)
4464 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4465 else
4466 stack_parm = gen_rtx_MEM (promoted_mode,
4467 gen_rtx_PLUS (Pmode,
4468 internal_arg_pointer,
4469 offset_rtx));
4471 set_mem_attributes (stack_parm, parm, 1);
4474 /* If this parameter was passed both in registers and in the stack,
4475 use the copy on the stack. */
4476 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4477 entry_parm = 0;
4479 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4480 /* If this parm was passed part in regs and part in memory,
4481 pretend it arrived entirely in memory
4482 by pushing the register-part onto the stack.
4484 In the special case of a DImode or DFmode that is split,
4485 we could put it together in a pseudoreg directly,
4486 but for now that's not worth bothering with. */
4488 if (entry_parm)
4490 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4491 passed_type, named_arg);
4493 if (nregs > 0)
4495 current_function_pretend_args_size
4496 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4497 / (PARM_BOUNDARY / BITS_PER_UNIT)
4498 * (PARM_BOUNDARY / BITS_PER_UNIT));
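/* As a worked example: on a target where UNITS_PER_WORD is 4 and
   PARM_BOUNDARY is 64, NREGS == 3 yields 12 bytes of register-passed
   data, which the expression above rounds up to 16 bytes of pretend
   arg space.  */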
4500 /* Handle calls that pass values in multiple non-contiguous
4501 locations. The Irix 6 ABI has examples of this. */
4502 if (GET_CODE (entry_parm) == PARALLEL)
4503 emit_group_store (validize_mem (stack_parm), entry_parm,
4504 int_size_in_bytes (TREE_TYPE (parm)));
4506 else
4507 move_block_from_reg (REGNO (entry_parm),
4508 validize_mem (stack_parm), nregs,
4509 int_size_in_bytes (TREE_TYPE (parm)));
4511 entry_parm = stack_parm;
4514 #endif
4516 /* If we didn't decide this parm came in a register,
4517 by default it came on the stack. */
4518 if (entry_parm == 0)
4519 entry_parm = stack_parm;
4521 /* Record permanently how this parm was passed. */
4522 DECL_INCOMING_RTL (parm) = entry_parm;
4524 /* If there is actually space on the stack for this parm,
4525 count it in stack_args_size; otherwise set stack_parm to 0
4526 to indicate there is no preallocated stack slot for the parm. */
4528 if (entry_parm == stack_parm
4529 || (GET_CODE (entry_parm) == PARALLEL
4530 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4531 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4532 /* On some machines, even if a parm value arrives in a register
4533 there is still an (uninitialized) stack slot allocated for it.
4535 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4536 whether this parameter already has a stack slot allocated,
4537 because an arg block exists only if current_function_args_size
4538 is larger than some threshold, and we haven't calculated that
4539 yet. So, for now, we just assume that stack slots never exist
4540 in this case. */
4541 || REG_PARM_STACK_SPACE (fndecl) > 0
4542 #endif
4545 stack_args_size.constant += arg_size.constant;
4546 if (arg_size.var)
4547 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4549 else
4550 /* No stack slot was pushed for this parm. */
4551 stack_parm = 0;
4553 /* Update info on where next arg arrives in registers. */
4555 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4556 passed_type, named_arg);
4558 /* If we can't trust the parm stack slot to be aligned enough
4559 for its ultimate type, don't use that slot after entry.
4560 We'll make another stack slot, if we need one. */
4562 unsigned int thisparm_boundary
4563 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4565 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4566 stack_parm = 0;
4569 /* If parm was passed in memory, and we need to convert it on entry,
4570 don't store it back in that same slot. */
4571 if (entry_parm != 0
4572 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4573 stack_parm = 0;
4575 /* When an argument is passed in multiple locations, we can't
4576 make use of this information, but we can save some copying if
4577 the whole argument is passed in a single register. */
4578 if (GET_CODE (entry_parm) == PARALLEL
4579 && nominal_mode != BLKmode && passed_mode != BLKmode)
4581 int i, len = XVECLEN (entry_parm, 0);
4583 for (i = 0; i < len; i++)
4584 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4585 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4586 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4587 == passed_mode)
4588 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4590 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4591 DECL_INCOMING_RTL (parm) = entry_parm;
4592 break;
4596 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4597 in the mode in which it arrives.
4598 STACK_PARM is an RTX for a stack slot where the parameter can live
4599 during the function (in case we want to put it there).
4600 STACK_PARM is 0 if no stack slot was pushed for it.
4602 Now output code if necessary to convert ENTRY_PARM to
4603 the type in which this function declares it,
4604 and store that result in an appropriate place,
4605 which may be a pseudo reg, may be STACK_PARM,
4606 or may be a local stack slot if STACK_PARM is 0.
4608 Set DECL_RTL to that place. */
4610 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4612 /* If a BLKmode arg arrives in registers, copy it to a stack slot.
4613 Handle calls that pass values in multiple non-contiguous
4614 locations. The Irix 6 ABI has examples of this. */
4615 if (GET_CODE (entry_parm) == REG
4616 || GET_CODE (entry_parm) == PARALLEL)
4618 int size_stored
4619 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4620 UNITS_PER_WORD);
4622 /* Note that we will be storing an integral number of words.
4623 So we have to be careful to ensure that we allocate an
4624 integral number of words. We do this below in the
4625 assign_stack_local if space was not allocated in the argument
4626 list. If it was, this will not work if PARM_BOUNDARY is not
4627 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4628 if it becomes a problem. */
4630 if (stack_parm == 0)
4632 stack_parm
4633 = assign_stack_local (GET_MODE (entry_parm),
4634 size_stored, 0);
4635 set_mem_attributes (stack_parm, parm, 1);
4638 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4639 abort ();
4641 /* Handle calls that pass values in multiple non-contiguous
4642 locations. The Irix 6 ABI has examples of this. */
4643 if (GET_CODE (entry_parm) == PARALLEL)
4644 emit_group_store (validize_mem (stack_parm), entry_parm,
4645 int_size_in_bytes (TREE_TYPE (parm)));
4646 else
4647 move_block_from_reg (REGNO (entry_parm),
4648 validize_mem (stack_parm),
4649 size_stored / UNITS_PER_WORD,
4650 int_size_in_bytes (TREE_TYPE (parm)));
4652 SET_DECL_RTL (parm, stack_parm);
4654 else if (! ((! optimize
4655 && ! DECL_REGISTER (parm))
4656 || TREE_SIDE_EFFECTS (parm)
4657 /* If -ffloat-store specified, don't put explicit
4658 float variables into registers. */
4659 || (flag_float_store
4660 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4661 /* Always assign pseudo to structure return or item passed
4662 by invisible reference. */
4663 || passed_pointer || parm == function_result_decl)
4665 /* Store the parm in a pseudoregister during the function, but we
4666 may need to do it in a wider mode. */
4668 rtx parmreg;
4669 unsigned int regno, regnoi = 0, regnor = 0;
4671 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4673 promoted_nominal_mode
4674 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4676 parmreg = gen_reg_rtx (promoted_nominal_mode);
4677 mark_user_reg (parmreg);
4679 /* If this was an item that we received a pointer to, set DECL_RTL
4680 appropriately. */
4681 if (passed_pointer)
4683 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4684 parmreg);
4685 set_mem_attributes (x, parm, 1);
4686 SET_DECL_RTL (parm, x);
4688 else
4690 SET_DECL_RTL (parm, parmreg);
4691 maybe_set_unchanging (DECL_RTL (parm), parm);
4694 /* Copy the value into the register. */
4695 if (nominal_mode != passed_mode
4696 || promoted_nominal_mode != promoted_mode)
4698 int save_tree_used;
4699 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4700 mode, by the caller. We now have to convert it to
4701 NOMINAL_MODE, if different. However, PARMREG may be in
4702 a different mode than NOMINAL_MODE if it is being stored
4703 promoted.
4705 If ENTRY_PARM is a hard register, it might be in a register
4706 not valid for operating in its mode (e.g., an odd-numbered
4707 register for a DFmode). In that case, moves are the only
4708 thing valid, so we can't do a convert from there. This
4709 occurs when the calling sequence allows such misaligned
4710 usages.
4712 In addition, the conversion may involve a call, which could
4713 clobber parameters which haven't been copied to pseudo
4714 registers yet. Therefore, we must first copy the parm to
4715 a pseudo reg here, and save the conversion until after all
4716 parameters have been moved. */
4718 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4720 emit_move_insn (tempreg, validize_mem (entry_parm));
4722 push_to_sequence (conversion_insns);
4723 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4725 if (GET_CODE (tempreg) == SUBREG
4726 && GET_MODE (tempreg) == nominal_mode
4727 && GET_CODE (SUBREG_REG (tempreg)) == REG
4728 && nominal_mode == passed_mode
4729 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4730 && GET_MODE_SIZE (GET_MODE (tempreg))
4731 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4733 /* The argument is already sign/zero extended, so record that
4734 in the SUBREG. */
4735 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4736 SUBREG_PROMOTED_UNSIGNED_P (tempreg) = unsignedp;
4739 /* TREE_USED gets set erroneously during expand_assignment. */
4740 save_tree_used = TREE_USED (parm);
4741 expand_assignment (parm,
4742 make_tree (nominal_type, tempreg), 0, 0);
4743 TREE_USED (parm) = save_tree_used;
4744 conversion_insns = get_insns ();
4745 did_conversion = 1;
4746 end_sequence ();
4748 else
4749 emit_move_insn (parmreg, validize_mem (entry_parm));
4751 /* If we were passed a pointer but the actual value
4752 can safely live in a register, put it in one. */
4753 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4754 /* If by-reference argument was promoted, demote it. */
4755 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4756 || ! ((! optimize
4757 && ! DECL_REGISTER (parm))
4758 || TREE_SIDE_EFFECTS (parm)
4759 /* If -ffloat-store specified, don't put explicit
4760 float variables into registers. */
4761 || (flag_float_store
4762 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4764 /* We can't use nominal_mode, because it will have been set to
4765 Pmode above. We must use the actual mode of the parm. */
4766 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4767 mark_user_reg (parmreg);
4768 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4770 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4771 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4772 push_to_sequence (conversion_insns);
4773 emit_move_insn (tempreg, DECL_RTL (parm));
4774 SET_DECL_RTL (parm,
4775 convert_to_mode (GET_MODE (parmreg),
4776 tempreg,
4777 unsigned_p));
4778 emit_move_insn (parmreg, DECL_RTL (parm));
4779 conversion_insns = get_insns();
4780 did_conversion = 1;
4781 end_sequence ();
4783 else
4784 emit_move_insn (parmreg, DECL_RTL (parm));
4785 SET_DECL_RTL (parm, parmreg);
4786 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4787 now the parm. */
4788 stack_parm = 0;
4790 #ifdef FUNCTION_ARG_CALLEE_COPIES
4791 /* If we are passed an arg by reference and it is our responsibility
4792 to make a copy, do it now.
4793 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4794 original argument, so we must recreate them in the call to
4795 FUNCTION_ARG_CALLEE_COPIES. */
4796 /* ??? Later add code to handle the case where the argument isn't
4797 modified, so the copy can be omitted. */
4799 else if (passed_pointer
4800 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4801 TYPE_MODE (DECL_ARG_TYPE (parm)),
4802 DECL_ARG_TYPE (parm),
4803 named_arg)
4804 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4806 rtx copy;
4807 tree type = DECL_ARG_TYPE (parm);
4809 /* This sequence may involve a library call perhaps clobbering
4810 registers that haven't been copied to pseudos yet. */
4812 push_to_sequence (conversion_insns);
4814 if (!COMPLETE_TYPE_P (type)
4815 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4816 /* This is a variable sized object. */
4817 copy = gen_rtx_MEM (BLKmode,
4818 allocate_dynamic_stack_space
4819 (expr_size (parm), NULL_RTX,
4820 TYPE_ALIGN (type)));
4821 else
4822 copy = assign_stack_temp (TYPE_MODE (type),
4823 int_size_in_bytes (type), 1);
4824 set_mem_attributes (copy, parm, 1);
4826 store_expr (parm, copy, 0);
4827 emit_move_insn (parmreg, XEXP (copy, 0));
4828 conversion_insns = get_insns ();
4829 did_conversion = 1;
4830 end_sequence ();
4832 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4834 /* In any case, record the parm's desired stack location
4835 in case we later discover it must live in the stack.
4837 If it is a COMPLEX value, store the stack location for both
4838 halves. */
4840 if (GET_CODE (parmreg) == CONCAT)
4841 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4842 else
4843 regno = REGNO (parmreg);
4845 if (regno >= max_parm_reg)
4847 rtx *new;
4848 int old_max_parm_reg = max_parm_reg;
4850 /* It's slow to expand this one register at a time,
4851 but it's also rare and we need max_parm_reg to be
4852 precisely correct. */
4853 max_parm_reg = regno + 1;
4854 new = (rtx *) xrealloc (parm_reg_stack_loc,
4855 max_parm_reg * sizeof (rtx));
4856 memset ((char *) (new + old_max_parm_reg), 0,
4857 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4858 parm_reg_stack_loc = new;
4861 if (GET_CODE (parmreg) == CONCAT)
4863 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4865 regnor = REGNO (gen_realpart (submode, parmreg));
4866 regnoi = REGNO (gen_imagpart (submode, parmreg));
4868 if (stack_parm != 0)
4870 parm_reg_stack_loc[regnor]
4871 = gen_realpart (submode, stack_parm);
4872 parm_reg_stack_loc[regnoi]
4873 = gen_imagpart (submode, stack_parm);
4875 else
4877 parm_reg_stack_loc[regnor] = 0;
4878 parm_reg_stack_loc[regnoi] = 0;
4881 else
4882 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4884 /* Mark the register as eliminable if we did no conversion
4885 and it was copied from memory at a fixed offset,
4886 and the arg pointer was not copied to a pseudo-reg.
4887 If the arg pointer is a pseudo reg or the offset formed
4888 an invalid address, such memory-equivalences
4889 as we make here would screw up life analysis for it. */
4890 if (nominal_mode == passed_mode
4891 && ! did_conversion
4892 && stack_parm != 0
4893 && GET_CODE (stack_parm) == MEM
4894 && stack_offset.var == 0
4895 && reg_mentioned_p (virtual_incoming_args_rtx,
4896 XEXP (stack_parm, 0)))
4898 rtx linsn = get_last_insn ();
4899 rtx sinsn, set;
4901 /* Mark complex types separately. */
4902 if (GET_CODE (parmreg) == CONCAT)
4903 /* Scan backwards for the set of the real and
4904 imaginary parts. */
4905 for (sinsn = linsn; sinsn != 0;
4906 sinsn = prev_nonnote_insn (sinsn))
4908 set = single_set (sinsn);
4909 if (set != 0
4910 && SET_DEST (set) == regno_reg_rtx [regnoi])
4911 REG_NOTES (sinsn)
4912 = gen_rtx_EXPR_LIST (REG_EQUIV,
4913 parm_reg_stack_loc[regnoi],
4914 REG_NOTES (sinsn));
4915 else if (set != 0
4916 && SET_DEST (set) == regno_reg_rtx [regnor])
4917 REG_NOTES (sinsn)
4918 = gen_rtx_EXPR_LIST (REG_EQUIV,
4919 parm_reg_stack_loc[regnor],
4920 REG_NOTES (sinsn));
4922 else if ((set = single_set (linsn)) != 0
4923 && SET_DEST (set) == parmreg)
4924 REG_NOTES (linsn)
4925 = gen_rtx_EXPR_LIST (REG_EQUIV,
4926 stack_parm, REG_NOTES (linsn));
4929 /* For pointer data type, suggest pointer register. */
4930 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4931 mark_reg_pointer (parmreg,
4932 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4934 /* If something wants our address, try to use ADDRESSOF. */
4935 if (TREE_ADDRESSABLE (parm))
4937 /* If we end up putting something into the stack,
4938 fixup_var_refs_insns will need to make a pass over
4939 all the instructions. It looks through the pending
4940 sequences -- but it can't see the ones in the
4941 CONVERSION_INSNS, if they're not on the sequence
4942 stack. So, we go back to that sequence, just so that
4943 the fixups will happen. */
4944 push_to_sequence (conversion_insns);
4945 put_var_into_stack (parm);
4946 conversion_insns = get_insns ();
4947 end_sequence ();
4950 else
4952 /* Value must be stored in the stack slot STACK_PARM
4953 during function execution. */
4955 if (promoted_mode != nominal_mode)
4957 /* Conversion is required. */
4958 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4960 emit_move_insn (tempreg, validize_mem (entry_parm));
4962 push_to_sequence (conversion_insns);
4963 entry_parm = convert_to_mode (nominal_mode, tempreg,
4964 TREE_UNSIGNED (TREE_TYPE (parm)));
4965 if (stack_parm)
4966 /* ??? This may need a big-endian conversion on sparc64. */
4967 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
4969 conversion_insns = get_insns ();
4970 did_conversion = 1;
4971 end_sequence ();
4974 if (entry_parm != stack_parm)
4976 if (stack_parm == 0)
4978 stack_parm
4979 = assign_stack_local (GET_MODE (entry_parm),
4980 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4981 set_mem_attributes (stack_parm, parm, 1);
4984 if (promoted_mode != nominal_mode)
4986 push_to_sequence (conversion_insns);
4987 emit_move_insn (validize_mem (stack_parm),
4988 validize_mem (entry_parm));
4989 conversion_insns = get_insns ();
4990 end_sequence ();
4992 else
4993 emit_move_insn (validize_mem (stack_parm),
4994 validize_mem (entry_parm));
4997 SET_DECL_RTL (parm, stack_parm);
5000 /* If this "parameter" was the place where we are receiving the
5001 function's incoming structure pointer, set up the result. */
5002 if (parm == function_result_decl)
5004 tree result = DECL_RESULT (fndecl);
5005 rtx addr = DECL_RTL (parm);
5006 rtx x;
5008 #ifdef POINTERS_EXTEND_UNSIGNED
5009 if (GET_MODE (addr) != Pmode)
5010 addr = convert_memory_address (Pmode, addr);
5011 #endif
5013 x = gen_rtx_MEM (DECL_MODE (result), addr);
5014 set_mem_attributes (x, result, 1);
5015 SET_DECL_RTL (result, x);
5018 if (GET_CODE (DECL_RTL (parm)) == REG)
5019 REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5020 else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5022 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5023 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5028 /* Output all parameter conversion instructions (possibly including calls)
5029 now that all parameters have been copied out of hard registers. */
5030 emit_insns (conversion_insns);
5032 last_parm_insn = get_last_insn ();
5034 current_function_args_size = stack_args_size.constant;
5036 /* Adjust function incoming argument size for alignment and
5037 minimum length. */
5039 #ifdef REG_PARM_STACK_SPACE
5040 #ifndef MAYBE_REG_PARM_STACK_SPACE
5041 current_function_args_size = MAX (current_function_args_size,
5042 REG_PARM_STACK_SPACE (fndecl));
5043 #endif
5044 #endif
5046 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5048 current_function_args_size
5049 = ((current_function_args_size + STACK_BYTES - 1)
5050 / STACK_BYTES) * STACK_BYTES;
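/* E.g., with STACK_BOUNDARY == 64 we have STACK_BYTES == 8, so an
   args size of 20 bytes is rounded up to 24.  */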
5052 #ifdef ARGS_GROW_DOWNWARD
5053 current_function_arg_offset_rtx
5054 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5055 : expand_expr (size_diffop (stack_args_size.var,
5056 size_int (-stack_args_size.constant)),
5057 NULL_RTX, VOIDmode, 0));
5058 #else
5059 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5060 #endif
5062 /* See how many bytes, if any, of its args a function should try to pop
5063 on return. */
5065 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5066 current_function_args_size);
5068 /* For stdarg.h function, save info about
5069 regs and stack space used by the named args. */
5071 if (!hide_last_arg)
5072 current_function_args_info = args_so_far;
5074 /* Set the rtx used for the function return value. Put this in its
5075 own variable so any optimizers that need this information don't have
5076 to include tree.h. Do this here so it gets done when an inlined
5077 function gets output. */
5079 current_function_return_rtx
5080 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5081 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5084 /* Indicate whether REGNO is an incoming argument to the current function
5085 that was promoted to a wider mode. If so, return the RTX for the
5086 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5087 that REGNO is promoted from and whether the promotion was signed or
5088 unsigned. */
5090 #ifdef PROMOTE_FUNCTION_ARGS
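/* For example, on a target that promotes a `short' argument to SImode,
   the incoming RTL is an SImode hard register; for that REGNO we return
   the register and report HImode and the argument's signedness through
   PMODE and PUNSIGNEDP.  */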
5093 promoted_input_arg (regno, pmode, punsignedp)
5094 unsigned int regno;
5095 enum machine_mode *pmode;
5096 int *punsignedp;
5098 tree arg;
5100 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5101 arg = TREE_CHAIN (arg))
5102 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5103 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5104 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5106 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5107 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5109 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5110 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5111 && mode != DECL_MODE (arg))
5113 *pmode = DECL_MODE (arg);
5114 *punsignedp = unsignedp;
5115 return DECL_INCOMING_RTL (arg);
5119 return 0;
5122 #endif
5124 /* Compute the size and offset from the start of the stacked arguments for a
5125 parm passed in mode PASSED_MODE and with type TYPE.
5127 INITIAL_OFFSET_PTR points to the current offset into the stacked
5128 arguments.
5130 The starting offset and size for this parm are returned in *OFFSET_PTR
5131 and *ARG_SIZE_PTR, respectively.
5133 IN_REGS is non-zero if the argument will be passed in registers. It will
5134 never be set if REG_PARM_STACK_SPACE is not defined.
5136 FNDECL is the function in which the argument was defined.
5138 There are two types of rounding that are done. The first, controlled by
5139 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5140 list to be aligned to the specific boundary (in bits). This rounding
5141 affects the initial and starting offsets, but not the argument size.
5143 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5144 optionally rounds the size of the parm to PARM_BOUNDARY. The
5145 initial offset is not affected by this rounding, while the size always
5146 is and the starting offset may be. */
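/* As a worked example: with a FUNCTION_ARG_BOUNDARY of 64 bits, an
   initial offset of 4 is rounded up to 8 before the parm is placed;
   with a PARM_BOUNDARY of 32 bits, a 6-byte parm's size is rounded up
   to 8, while the initial offset is unaffected by that second
   rounding.  */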
5148 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
5149 initial_offset_ptr is positive because locate_and_pad_parm's
5150 callers pass in the total size of args so far as
5151 initial_offset_ptr. arg_size_ptr is always positive. */
5153 void
5154 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5155 initial_offset_ptr, offset_ptr, arg_size_ptr,
5156 alignment_pad)
5157 enum machine_mode passed_mode;
5158 tree type;
5159 int in_regs ATTRIBUTE_UNUSED;
5160 tree fndecl ATTRIBUTE_UNUSED;
5161 struct args_size *initial_offset_ptr;
5162 struct args_size *offset_ptr;
5163 struct args_size *arg_size_ptr;
5164 struct args_size *alignment_pad;
5167 tree sizetree
5168 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5169 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5170 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5172 #ifdef REG_PARM_STACK_SPACE
5173 /* If we have found a stack parm before we reach the end of the
5174 area reserved for registers, skip that area. */
5175 if (! in_regs)
5177 int reg_parm_stack_space = 0;
5179 #ifdef MAYBE_REG_PARM_STACK_SPACE
5180 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5181 #else
5182 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5183 #endif
5184 if (reg_parm_stack_space > 0)
5186 if (initial_offset_ptr->var)
5188 initial_offset_ptr->var
5189 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5190 ssize_int (reg_parm_stack_space));
5191 initial_offset_ptr->constant = 0;
5193 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5194 initial_offset_ptr->constant = reg_parm_stack_space;
5197 #endif /* REG_PARM_STACK_SPACE */
5199 arg_size_ptr->var = 0;
5200 arg_size_ptr->constant = 0;
5201 alignment_pad->var = 0;
5202 alignment_pad->constant = 0;
5204 #ifdef ARGS_GROW_DOWNWARD
5205 if (initial_offset_ptr->var)
5207 offset_ptr->constant = 0;
5208 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5209 initial_offset_ptr->var);
5211 else
5213 offset_ptr->constant = -initial_offset_ptr->constant;
5214 offset_ptr->var = 0;
5216 if (where_pad != none
5217 && (!host_integerp (sizetree, 1)
5218 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5219 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5220 SUB_PARM_SIZE (*offset_ptr, sizetree);
5221 if (where_pad != downward)
5222 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5223 if (initial_offset_ptr->var)
5224 arg_size_ptr->var = size_binop (MINUS_EXPR,
5225 size_binop (MINUS_EXPR,
5226 ssize_int (0),
5227 initial_offset_ptr->var),
5228 offset_ptr->var);
5230 else
5231 arg_size_ptr->constant = (-initial_offset_ptr->constant
5232 - offset_ptr->constant);
5234 #else /* !ARGS_GROW_DOWNWARD */
5235 if (!in_regs
5236 #ifdef REG_PARM_STACK_SPACE
5237 || REG_PARM_STACK_SPACE (fndecl) > 0
5238 #endif
5240 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5241 *offset_ptr = *initial_offset_ptr;
5243 #ifdef PUSH_ROUNDING
5244 if (passed_mode != BLKmode)
5245 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5246 #endif
5248 /* pad_below needs the pre-rounded size to know how much to pad below,
5249 so this must be done before rounding up. */
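/* E.g., an HImode parm with PARM_BOUNDARY == 32 gets offset_ptr
   advanced by 4 - 2 = 2 bytes of padding below the value.  */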
5250 if (where_pad == downward
5251 /* However, BLKmode args passed in regs have their padding done elsewhere.
5252 The stack slot must be able to hold the entire register. */
5253 && !(in_regs && passed_mode == BLKmode))
5254 pad_below (offset_ptr, passed_mode, sizetree);
5256 if (where_pad != none
5257 && (!host_integerp (sizetree, 1)
5258 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5259 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5261 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5262 #endif /* ARGS_GROW_DOWNWARD */
5265 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5266 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
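/* E.g., rounding a constant offset of 20 to a 128-bit (16-byte)
   boundary yields 32 via CEIL_ROUND when args grow upward, and 16 via
   FLOOR_ROUND when they grow downward.  */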
5268 static void
5269 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5270 struct args_size *offset_ptr;
5271 int boundary;
5272 struct args_size *alignment_pad;
5274 tree save_var = NULL_TREE;
5275 HOST_WIDE_INT save_constant = 0;
5277 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5279 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5281 save_var = offset_ptr->var;
5282 save_constant = offset_ptr->constant;
5285 alignment_pad->var = NULL_TREE;
5286 alignment_pad->constant = 0;
5288 if (boundary > BITS_PER_UNIT)
5290 if (offset_ptr->var)
5292 offset_ptr->var =
5293 #ifdef ARGS_GROW_DOWNWARD
5294 round_down
5295 #else
5296 round_up
5297 #endif
5298 (ARGS_SIZE_TREE (*offset_ptr),
5299 boundary / BITS_PER_UNIT);
5300 offset_ptr->constant = 0; /*?*/
5301 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5302 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5303 save_var);
5305 else
5307 offset_ptr->constant =
5308 #ifdef ARGS_GROW_DOWNWARD
5309 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5310 #else
5311 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5312 #endif
5313 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5314 alignment_pad->constant = offset_ptr->constant - save_constant;
5319 #ifndef ARGS_GROW_DOWNWARD
5320 static void
5321 pad_below (offset_ptr, passed_mode, sizetree)
5322 struct args_size *offset_ptr;
5323 enum machine_mode passed_mode;
5324 tree sizetree;
5326 if (passed_mode != BLKmode)
5328 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5329 offset_ptr->constant
5330 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5331 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5332 - GET_MODE_SIZE (passed_mode));
5334 else
5336 if (TREE_CODE (sizetree) != INTEGER_CST
5337 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5339 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5340 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5341 /* Add it in. */
5342 ADD_PARM_SIZE (*offset_ptr, s2);
5343 SUB_PARM_SIZE (*offset_ptr, sizetree);
5347 #endif
5349 /* Walk the tree of blocks describing the binding levels within a function
5350 and warn about uninitialized variables.
5351 This is done after calling flow_analysis and before global_alloc
5352 clobbers the pseudo-regs to hard regs. */
5354 void
5355 uninitialized_vars_warning (block)
5356 tree block;
5358 tree decl, sub;
5359 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5361 if (warn_uninitialized
5362 && TREE_CODE (decl) == VAR_DECL
5363 /* These warnings are unreliable for aggregates
5364 because assigning the fields one by one can fail to convince
5365 flow.c that the entire aggregate was initialized.
5366 Unions are troublesome because members may be shorter. */
5367 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5368 && DECL_RTL (decl) != 0
5369 && GET_CODE (DECL_RTL (decl)) == REG
5370 /* Global optimizations can make it difficult to determine if a
5371 particular variable has been initialized. However, a VAR_DECL
5372 with a nonzero DECL_INITIAL had an initializer, so do not
5373 claim it is potentially uninitialized.
5375 We do not care about the actual value in DECL_INITIAL, so we do
5376 not worry that it may be a dangling pointer. */
5377 && DECL_INITIAL (decl) == NULL_TREE
5378 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5379 warning_with_decl (decl,
5380 "`%s' might be used uninitialized in this function");
5381 if (extra_warnings
5382 && TREE_CODE (decl) == VAR_DECL
5383 && DECL_RTL (decl) != 0
5384 && GET_CODE (DECL_RTL (decl)) == REG
5385 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5386 warning_with_decl (decl,
5387 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5389 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5390 uninitialized_vars_warning (sub);
5393 /* Do the appropriate part of uninitialized_vars_warning
5394 but for arguments instead of local variables. */
5396 void
5397 setjmp_args_warning ()
5399 tree decl;
5400 for (decl = DECL_ARGUMENTS (current_function_decl);
5401 decl; decl = TREE_CHAIN (decl))
5402 if (DECL_RTL (decl) != 0
5403 && GET_CODE (DECL_RTL (decl)) == REG
5404 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5405 warning_with_decl (decl,
5406 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5409 /* If this function calls setjmp, put all vars into the stack
5410 unless they were declared `register'. */
5412 void
5413 setjmp_protect (block)
5414 tree block;
5416 tree decl, sub;
5417 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5418 if ((TREE_CODE (decl) == VAR_DECL
5419 || TREE_CODE (decl) == PARM_DECL)
5420 && DECL_RTL (decl) != 0
5421 && (GET_CODE (DECL_RTL (decl)) == REG
5422 || (GET_CODE (DECL_RTL (decl)) == MEM
5423 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5424 /* If this variable came from an inline function, it must be
5425 that its life doesn't overlap the setjmp. If there was a
5426 setjmp in the function, it would already be in memory. We
5427 must exclude such variables because their DECL_RTL might be
5428 set to strange things such as virtual_stack_vars_rtx. */
5429 && ! DECL_FROM_INLINE (decl)
5430 && (
5431 #ifdef NON_SAVING_SETJMP
5432 /* If longjmp doesn't restore the registers,
5433 don't put anything in them. */
5434 NON_SAVING_SETJMP
5436 #endif
5437 ! DECL_REGISTER (decl)))
5438 put_var_into_stack (decl);
5439 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5440 setjmp_protect (sub);
5443 /* Like the previous function, but for args instead of local variables. */
5445 void
5446 setjmp_protect_args ()
5448 tree decl;
5449 for (decl = DECL_ARGUMENTS (current_function_decl);
5450 decl; decl = TREE_CHAIN (decl))
5451 if ((TREE_CODE (decl) == VAR_DECL
5452 || TREE_CODE (decl) == PARM_DECL)
5453 && DECL_RTL (decl) != 0
5454 && (GET_CODE (DECL_RTL (decl)) == REG
5455 || (GET_CODE (DECL_RTL (decl)) == MEM
5456 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5457 && (
5458 /* If longjmp doesn't restore the registers,
5459 don't put anything in them. */
5460 #ifdef NON_SAVING_SETJMP
5461 NON_SAVING_SETJMP
5463 #endif
5464 ! DECL_REGISTER (decl)))
5465 put_var_into_stack (decl);
5468 /* Return the context-pointer register corresponding to DECL,
5469 or 0 if it does not need one. */
5472 lookup_static_chain (decl)
5473 tree decl;
5475 tree context = decl_function_context (decl);
5476 tree link;
5478 if (context == 0
5479 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5480 return 0;
5482 /* We treat inline_function_decl as an alias for the current function
5483 because that is the inline function whose vars, types, etc.
5484 are being merged into the current function.
5485 See expand_inline_function. */
5486 if (context == current_function_decl || context == inline_function_decl)
5487 return virtual_stack_vars_rtx;
5489 for (link = context_display; link; link = TREE_CHAIN (link))
5490 if (TREE_PURPOSE (link) == context)
5491 return RTL_EXPR_RTL (TREE_VALUE (link));
5493 abort ();
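/* Sketch of what lookup_static_chain serves, for illustration only.
   With GNU C nested functions, the reference to `x' from `inner'
   below needs the frame base of the enclosing activation of `outer';
   that base arrives through the static chain and is found via the
   context_display list walked above:

       int
       outer (int x)
       {
         int inner (void) { return x; }
         return inner ();
       }
 */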
5496 /* Convert a stack slot address ADDR for variable VAR
5497 (from a containing function)
5498 into an address valid in this function (using a static chain). */
5500 rtx
5501 fix_lexical_addr (addr, var)
5502 rtx addr;
5503 tree var;
5505 rtx basereg;
5506 HOST_WIDE_INT displacement;
5507 tree context = decl_function_context (var);
5508 struct function *fp;
5509 rtx base = 0;
5511 /* If this is the present function, we need not do anything. */
5512 if (context == current_function_decl || context == inline_function_decl)
5513 return addr;
5515 fp = find_function_data (context);
5517 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5518 addr = XEXP (XEXP (addr, 0), 0);
5520 /* Decode given address as base reg plus displacement. */
5521 if (GET_CODE (addr) == REG)
5522 basereg = addr, displacement = 0;
5523 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5524 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5525 else
5526 abort ();
5528 /* We accept vars reached via the containing function's
5529 incoming arg pointer and via its stack variables pointer. */
5530 if (basereg == fp->internal_arg_pointer)
5532 /* If reached via arg pointer, get the arg pointer value
5533 out of that function's stack frame.
5535 There are two cases: If a separate ap is needed, allocate a
5536 slot in the outer function for it and dereference it that way.
5537 This is correct even if the real ap is actually a pseudo.
5538 Otherwise, just adjust the offset from the frame pointer to
5539 compensate. */
5541 #ifdef NEED_SEPARATE_AP
5542 rtx addr;
5544 addr = get_arg_pointer_save_area (fp);
5545 addr = fix_lexical_addr (XEXP (addr, 0), var);
5546 addr = memory_address (Pmode, addr);
5548 base = gen_rtx_MEM (Pmode, addr);
5549 set_mem_alias_set (base, get_frame_alias_set ());
5550 base = copy_to_reg (base);
5551 #else
5552 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5553 base = lookup_static_chain (var);
5554 #endif
5557 else if (basereg == virtual_stack_vars_rtx)
5559 /* This is the same code as lookup_static_chain, duplicated here to
5560 avoid an extra call to decl_function_context. */
5561 tree link;
5563 for (link = context_display; link; link = TREE_CHAIN (link))
5564 if (TREE_PURPOSE (link) == context)
5566 base = RTL_EXPR_RTL (TREE_VALUE (link));
5567 break;
5571 if (base == 0)
5572 abort ();
5574 /* Use same offset, relative to appropriate static chain or argument
5575 pointer. */
5576 return plus_constant (base, displacement);
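/* Worked example, for illustration: if VAR's slot in its containing
   function is addressed as (plus (reg) (const_int -8)), the code
   above decodes basereg = that reg and displacement = -8, obtains
   the containing frame's corresponding base in this function
   (through the saved arg pointer or the static chain), and returns
   that base plus the same -8.  */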
5579 /* Return the address of the trampoline for entering nested fn FUNCTION.
5580 If necessary, allocate a trampoline (in the stack frame)
5581 and emit rtl to initialize its contents (at entry to this function). */
5583 rtx
5584 trampoline_address (function)
5585 tree function;
5587 tree link;
5588 tree rtlexp;
5589 rtx tramp;
5590 struct function *fp;
5591 tree fn_context;
5593 /* Find an existing trampoline and return it. */
5594 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5595 if (TREE_PURPOSE (link) == function)
5596 return
5597 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5599 for (fp = outer_function_chain; fp; fp = fp->outer)
5600 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5601 if (TREE_PURPOSE (link) == function)
5603 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5604 function);
5605 return adjust_trampoline_addr (tramp);
5608 /* None exists; we must make one. */
5610 /* Find the `struct function' for the function containing FUNCTION. */
5611 fp = 0;
5612 fn_context = decl_function_context (function);
5613 if (fn_context != current_function_decl
5614 && fn_context != inline_function_decl)
5615 fp = find_function_data (fn_context);
5617 /* Allocate run-time space for this trampoline
5618 (usually in the defining function's stack frame). */
5619 #ifdef ALLOCATE_TRAMPOLINE
5620 tramp = ALLOCATE_TRAMPOLINE (fp);
5621 #else
5622 /* If rounding is needed, allocate extra space
5623 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5624 #ifdef TRAMPOLINE_ALIGNMENT
5625 #define TRAMPOLINE_REAL_SIZE \
5626 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5627 #else
5628 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5629 #endif
5630 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5631 fp ? fp : cfun);
5632 #endif
5634 /* Record the trampoline for reuse and note it for later initialization
5635 by expand_function_end. */
5636 if (fp != 0)
5638 rtlexp = make_node (RTL_EXPR);
5639 RTL_EXPR_RTL (rtlexp) = tramp;
5640 fp->x_trampoline_list = tree_cons (function, rtlexp,
5641 fp->x_trampoline_list);
5643 else
5645 /* Make the RTL_EXPR node temporary, not momentary, so that the
5646 trampoline_list doesn't become garbage. */
5647 rtlexp = make_node (RTL_EXPR);
5649 RTL_EXPR_RTL (rtlexp) = tramp;
5650 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5653 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5654 return adjust_trampoline_addr (tramp);
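/* Illustration, not part of this file: what forces a trampoline.
   A plain code address cannot carry the static chain, so when the
   address of a nested function escapes, a stack trampoline that
   loads the chain and jumps to the real code is handed out instead:

       extern void apply (void (*fp) (void));

       void
       outer (int x)
       {
         void inner (void) { x++; }
         apply (inner);
       }

   Here `apply' is hypothetical; the call passes the result of
   trampoline_address for `inner'.  */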
5657 /* Given a trampoline address,
5658 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5660 static rtx
5661 round_trampoline_addr (tramp)
5662 rtx tramp;
5664 #ifdef TRAMPOLINE_ALIGNMENT
5665 /* Round address up to desired boundary. */
5666 rtx temp = gen_reg_rtx (Pmode);
5667 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5668 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5670 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5671 temp, 0, OPTAB_LIB_WIDEN);
5672 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5673 temp, 0, OPTAB_LIB_WIDEN);
5674 #endif
5675 return tramp;
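/* Worked example of the arithmetic above: with TRAMPOLINE_ALIGNMENT
   of 128 bits, ADDEND is 15 and MASK is -16, so an address of
   0x1003 becomes (0x1003 + 15) & -16 = 0x1010, the next 16-byte
   boundary; an already-aligned address is unchanged.  */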
5678 /* Given a trampoline address, round it and then apply any
5679 platform-specific adjustments so that the result can be used for a
5680 function call. */
5682 static rtx
5683 adjust_trampoline_addr (tramp)
5684 rtx tramp;
5686 tramp = round_trampoline_addr (tramp);
5687 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5688 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5689 #endif
5690 return tramp;
5693 /* Set NOTE_BLOCK for every NOTE_INSN_BLOCK_{BEG,END} in the current
5694 function: gather all the function's BLOCK nodes, including those
5695 chained onto the first block, into a vector in depth-first order,
5696 then walk the insn chain and record in each such note the BLOCK it
5697 opens or closes. The block tree comes from DECL_INITIAL
5698 (current_function_decl) and the insns from get_insns (). */
5700 void
5701 identify_blocks ()
5703 int n_blocks;
5704 tree *block_vector, *last_block_vector;
5705 tree *block_stack;
5706 tree block = DECL_INITIAL (current_function_decl);
5708 if (block == 0)
5709 return;
5711 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5712 depth-first order. */
5713 block_vector = get_block_vector (block, &n_blocks);
5714 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5716 last_block_vector = identify_blocks_1 (get_insns (),
5717 block_vector + 1,
5718 block_vector + n_blocks,
5719 block_stack);
5721 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5722 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5723 if (0 && last_block_vector != block_vector + n_blocks)
5724 abort ();
5726 free (block_vector);
5727 free (block_stack);
5730 /* Subroutine of identify_blocks. Do the block substitution on the
5731 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5733 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5734 BLOCK_VECTOR is incremented for each block seen. */
5736 static tree *
5737 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5738 rtx insns;
5739 tree *block_vector;
5740 tree *end_block_vector;
5741 tree *orig_block_stack;
5743 rtx insn;
5744 tree *block_stack = orig_block_stack;
5746 for (insn = insns; insn; insn = NEXT_INSN (insn))
5748 if (GET_CODE (insn) == NOTE)
5750 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5752 tree b;
5754 /* If there are more block notes than BLOCKs, something
5755 is badly wrong. */
5756 if (block_vector == end_block_vector)
5757 abort ();
5759 b = *block_vector++;
5760 NOTE_BLOCK (insn) = b;
5761 *block_stack++ = b;
5763 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5765 /* If there are more NOTE_INSN_BLOCK_ENDs than
5766 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5767 if (block_stack == orig_block_stack)
5768 abort ();
5770 NOTE_BLOCK (insn) = *--block_stack;
5773 else if (GET_CODE (insn) == CALL_INSN
5774 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5776 rtx cp = PATTERN (insn);
5778 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5779 end_block_vector, block_stack);
5780 if (XEXP (cp, 1))
5781 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5782 end_block_vector, block_stack);
5783 if (XEXP (cp, 2))
5784 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5785 end_block_vector, block_stack);
5789 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5790 something is badly wrong. */
5791 if (block_stack != orig_block_stack)
5792 abort ();
5794 return block_vector;
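/* Example of the matching above, for illustration: given the note
   sequence BEG(A) BEG(B) END(B) END(A), the vector walk assigns
   NOTE_BLOCK = A then B at the BEG notes, and the END notes pop B
   then A from BLOCK_STACK, so every END pairs with the innermost
   open BEG, exactly like matching parentheses.  */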
5797 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5798 and create duplicate blocks. */
5799 /* ??? Need an option to either create block fragments or to create
5800 abstract origin duplicates of a source block. It really depends
5801 on what optimization has been performed. */
5803 void
5804 reorder_blocks ()
5806 tree block = DECL_INITIAL (current_function_decl);
5807 varray_type block_stack;
5809 if (block == NULL_TREE)
5810 return;
5812 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5814 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5815 reorder_blocks_0 (block);
5817 /* Prune the old trees away, so that they don't get in the way. */
5818 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5819 BLOCK_CHAIN (block) = NULL_TREE;
5821 /* Recreate the block tree from the note nesting. */
5822 reorder_blocks_1 (get_insns (), block, &block_stack);
5823 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5825 /* Remove deleted blocks from the block fragment chains. */
5826 reorder_fix_fragments (block);
5828 VARRAY_FREE (block_stack);
5831 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5833 static void
5834 reorder_blocks_0 (block)
5835 tree block;
5837 while (block)
5839 TREE_ASM_WRITTEN (block) = 0;
5840 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5841 block = BLOCK_CHAIN (block);
5845 static void
5846 reorder_blocks_1 (insns, current_block, p_block_stack)
5847 rtx insns;
5848 tree current_block;
5849 varray_type *p_block_stack;
5851 rtx insn;
5853 for (insn = insns; insn; insn = NEXT_INSN (insn))
5855 if (GET_CODE (insn) == NOTE)
5857 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5859 tree block = NOTE_BLOCK (insn);
5861 /* If we have seen this block before, that means it now
5862 spans multiple address regions. Create a new fragment. */
5863 if (TREE_ASM_WRITTEN (block))
5865 tree new_block = copy_node (block);
5866 tree origin;
5868 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5869 ? BLOCK_FRAGMENT_ORIGIN (block)
5870 : block);
5871 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5872 BLOCK_FRAGMENT_CHAIN (new_block)
5873 = BLOCK_FRAGMENT_CHAIN (origin);
5874 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5876 NOTE_BLOCK (insn) = new_block;
5877 block = new_block;
5880 BLOCK_SUBBLOCKS (block) = 0;
5881 TREE_ASM_WRITTEN (block) = 1;
5882 BLOCK_SUPERCONTEXT (block) = current_block;
5883 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5884 BLOCK_SUBBLOCKS (current_block) = block;
5885 current_block = block;
5886 VARRAY_PUSH_TREE (*p_block_stack, block);
5888 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5890 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5891 VARRAY_POP (*p_block_stack);
5892 BLOCK_SUBBLOCKS (current_block)
5893 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5894 current_block = BLOCK_SUPERCONTEXT (current_block);
5897 else if (GET_CODE (insn) == CALL_INSN
5898 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5900 rtx cp = PATTERN (insn);
5901 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5902 if (XEXP (cp, 1))
5903 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5904 if (XEXP (cp, 2))
5905 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
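/* Example of fragment creation above, for illustration: if block B
   was split across two address ranges, its notes arrive as BEG(B)
   END(B) ... BEG(B) END(B).  At the second BEG, TREE_ASM_WRITTEN (B)
   is already set, so a copy B' is made with BLOCK_FRAGMENT_ORIGIN
   (B') == B, chained onto B's fragment list, and the second pair of
   notes is retargeted at B'.  */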
5910 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
5911 appears in the block tree, select one of the fragments to become
5912 the new origin block. */
5914 static void
5915 reorder_fix_fragments (block)
5916 tree block;
5918 while (block)
5920 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
5921 tree new_origin = NULL_TREE;
5923 if (dup_origin)
5925 if (! TREE_ASM_WRITTEN (dup_origin))
5927 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
5929 /* Find the first of the remaining fragments. There must
5930 be at least one -- the current block. */
5931 while (! TREE_ASM_WRITTEN (new_origin))
5932 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
5933 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
5936 else if (! dup_origin)
5937 new_origin = block;
5939 /* Re-root the rest of the fragments to the new origin. In the
5940 case that DUP_ORIGIN was null, that means BLOCK was the origin
5941 of a chain of fragments and we want to remove those fragments
5942 that didn't make it to the output. */
5943 if (new_origin)
5945 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
5946 tree chain = *pp;
5948 while (chain)
5950 if (TREE_ASM_WRITTEN (chain))
5952 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
5953 *pp = chain;
5954 pp = &BLOCK_FRAGMENT_CHAIN (chain);
5956 chain = BLOCK_FRAGMENT_CHAIN (chain);
5958 *pp = NULL_TREE;
5961 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
5962 block = BLOCK_CHAIN (block);
5966 /* Reverse the order of elements in the chain T of blocks,
5967 and return the new head of the chain (old last element). */
5969 static tree
5970 blocks_nreverse (t)
5971 tree t;
5973 tree prev = 0, decl, next;
5974 for (decl = t; decl; decl = next)
5976 next = BLOCK_CHAIN (decl);
5977 BLOCK_CHAIN (decl) = prev;
5978 prev = decl;
5980 return prev;
5983 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5984 non-NULL, list them all into VECTOR, in a depth-first preorder
5985 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5986 blocks. */
5988 static int
5989 all_blocks (block, vector)
5990 tree block;
5991 tree *vector;
5993 int n_blocks = 0;
5995 while (block)
5997 TREE_ASM_WRITTEN (block) = 0;
5999 /* Record this block. */
6000 if (vector)
6001 vector[n_blocks] = block;
6003 ++n_blocks;
6005 /* Record the subblocks, and their subblocks... */
6006 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6007 vector ? vector + n_blocks : 0);
6008 block = BLOCK_CHAIN (block);
6011 return n_blocks;
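/* Example, for illustration: for a chain A -> C where A has
   subblocks B and D, the preorder walk above fills VECTOR with
   A, B, D, C and returns 4.  */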
6014 /* Return a vector containing all the blocks rooted at BLOCK. The
6015 number of elements in the vector is stored in N_BLOCKS_P. The
6016 vector is dynamically allocated; it is the caller's responsibility
6017 to call `free' on the pointer returned. */
6019 static tree *
6020 get_block_vector (block, n_blocks_p)
6021 tree block;
6022 int *n_blocks_p;
6024 tree *block_vector;
6026 *n_blocks_p = all_blocks (block, NULL);
6027 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6028 all_blocks (block, block_vector);
6030 return block_vector;
6033 static int next_block_index = 2;
6035 /* Set BLOCK_NUMBER for all the blocks in FN. */
6037 void
6038 number_blocks (fn)
6039 tree fn;
6041 int i;
6042 int n_blocks;
6043 tree *block_vector;
6045 /* For SDB and XCOFF debugging output, we start numbering the blocks
6046 from 1 within each function, rather than keeping a running
6047 count. */
6048 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6049 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6050 next_block_index = 1;
6051 #endif
6053 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6055 /* The top-level BLOCK isn't numbered at all. */
6056 for (i = 1; i < n_blocks; ++i)
6057 /* We number the blocks from two. */
6058 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6060 free (block_vector);
6062 return;
6065 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6067 tree
6068 debug_find_var_in_block_tree (var, block)
6069 tree var;
6070 tree block;
6072 tree t;
6074 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6075 if (t == var)
6076 return block;
6078 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6080 tree ret = debug_find_var_in_block_tree (var, t);
6081 if (ret)
6082 return ret;
6085 return NULL_TREE;
6088 /* Allocate a function structure and reset its contents to the defaults. */
6090 static void
6091 prepare_function_start ()
6093 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6095 init_stmt_for_function ();
6096 init_eh_for_function ();
6098 cse_not_expected = ! optimize;
6100 /* Caller save not needed yet. */
6101 caller_save_needed = 0;
6103 /* No stack slots have been made yet. */
6104 stack_slot_list = 0;
6106 current_function_has_nonlocal_label = 0;
6107 current_function_has_nonlocal_goto = 0;
6109 /* There is no stack slot for handling nonlocal gotos. */
6110 nonlocal_goto_handler_slots = 0;
6111 nonlocal_goto_stack_level = 0;
6113 /* No labels have been declared for nonlocal use. */
6114 nonlocal_labels = 0;
6115 nonlocal_goto_handler_labels = 0;
6117 /* No function calls so far in this function. */
6118 function_call_count = 0;
6120 /* No parm regs have been allocated.
6121 (This is important for output_inline_function.) */
6122 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6124 /* Initialize the RTL mechanism. */
6125 init_emit ();
6127 /* Initialize the queue of pending postincrement and postdecrements,
6128 and some other info in expr.c. */
6129 init_expr ();
6131 /* We haven't done register allocation yet. */
6132 reg_renumber = 0;
6134 init_varasm_status (cfun);
6136 /* Clear out data used for inlining. */
6137 cfun->inlinable = 0;
6138 cfun->original_decl_initial = 0;
6139 cfun->original_arg_vector = 0;
6141 cfun->stack_alignment_needed = STACK_BOUNDARY;
6142 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6144 /* Set if a call to setjmp is seen. */
6145 current_function_calls_setjmp = 0;
6147 /* Set if a call to longjmp is seen. */
6148 current_function_calls_longjmp = 0;
6150 current_function_calls_alloca = 0;
6151 current_function_contains_functions = 0;
6152 current_function_is_leaf = 0;
6153 current_function_nothrow = 0;
6154 current_function_sp_is_unchanging = 0;
6155 current_function_uses_only_leaf_regs = 0;
6156 current_function_has_computed_jump = 0;
6157 current_function_is_thunk = 0;
6159 current_function_returns_pcc_struct = 0;
6160 current_function_returns_struct = 0;
6161 current_function_epilogue_delay_list = 0;
6162 current_function_uses_const_pool = 0;
6163 current_function_uses_pic_offset_table = 0;
6164 current_function_cannot_inline = 0;
6166 /* We have not yet needed to make a label to jump to for tail-recursion. */
6167 tail_recursion_label = 0;
6169 /* We haven't had a need to make a save area for ap yet. */
6170 arg_pointer_save_area = 0;
6172 /* No stack slots allocated yet. */
6173 frame_offset = 0;
6175 /* No SAVE_EXPRs in this function yet. */
6176 save_expr_regs = 0;
6178 /* No RTL_EXPRs in this function yet. */
6179 rtl_expr_chain = 0;
6181 /* Set up to allocate temporaries. */
6182 init_temp_slots ();
6184 /* Indicate that we need to distinguish between the return value of the
6185 present function and the return value of a function being called. */
6186 rtx_equal_function_value_matters = 1;
6188 /* Indicate that we have not instantiated virtual registers yet. */
6189 virtuals_instantiated = 0;
6191 /* Indicate that we want CONCATs now. */
6192 generating_concat_p = 1;
6194 /* Indicate we have no need of a frame pointer yet. */
6195 frame_pointer_needed = 0;
6197 /* By default assume not varargs or stdarg. */
6198 current_function_varargs = 0;
6199 current_function_stdarg = 0;
6201 /* We haven't made any trampolines for this function yet. */
6202 trampoline_list = 0;
6204 init_pending_stack_adjust ();
6205 inhibit_defer_pop = 0;
6207 current_function_outgoing_args_size = 0;
6209 if (init_lang_status)
6210 (*init_lang_status) (cfun);
6211 if (init_machine_status)
6212 (*init_machine_status) (cfun);
6215 /* Initialize the rtl expansion mechanism so that we can do simple things
6216 like generate sequences. This is used to provide a context during global
6217 initialization of some passes. */
6218 void
6219 init_dummy_function_start ()
6221 prepare_function_start ();
6224 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6225 and initialize static variables for generating RTL for the statements
6226 of the function. */
6228 void
6229 init_function_start (subr, filename, line)
6230 tree subr;
6231 const char *filename;
6232 int line;
6234 prepare_function_start ();
6236 current_function_name = (*decl_printable_name) (subr, 2);
6237 cfun->decl = subr;
6239 /* Nonzero if this is a nested function that uses a static chain. */
6241 current_function_needs_context
6242 = (decl_function_context (current_function_decl) != 0
6243 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6245 /* Within function body, compute a type's size as soon as it is laid out. */
6246 immediate_size_expand++;
6248 /* Prevent ever trying to delete the first instruction of a function.
6249 Also tell final how to output a linenum before the function prologue.
6250 Note linenums could be missing, e.g. when compiling a Java .class file. */
6251 if (line > 0)
6252 emit_line_note (filename, line);
6254 /* Make sure first insn is a note even if we don't want linenums.
6255 This makes sure the first insn will never be deleted.
6256 Also, final expects a note to appear there. */
6257 emit_note (NULL, NOTE_INSN_DELETED);
6259 /* Set flags used by final.c. */
6260 if (aggregate_value_p (DECL_RESULT (subr)))
6262 #ifdef PCC_STATIC_STRUCT_RETURN
6263 current_function_returns_pcc_struct = 1;
6264 #endif
6265 current_function_returns_struct = 1;
6268 /* Warn if this value is an aggregate type,
6269 regardless of which calling convention we are using for it. */
6270 if (warn_aggregate_return
6271 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6272 warning ("function returns an aggregate");
6274 current_function_returns_pointer
6275 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6278 /* Make sure all values used by the optimization passes have sane
6279 defaults. */
6280 void
6281 init_function_for_compilation ()
6283 reg_renumber = 0;
6285 /* No prologue/epilogue insns yet. */
6286 VARRAY_GROW (prologue, 0);
6287 VARRAY_GROW (epilogue, 0);
6288 VARRAY_GROW (sibcall_epilogue, 0);
6291 /* Indicate that the current function uses extra args
6292 not explicitly mentioned in the argument list in any fashion. */
6294 void
6295 mark_varargs ()
6297 current_function_varargs = 1;
6300 /* Expand a call to __main at the beginning of a possible main function. */
6302 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6303 #undef HAS_INIT_SECTION
6304 #define HAS_INIT_SECTION
6305 #endif
6307 void
6308 expand_main_function ()
6310 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6311 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6313 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6314 rtx tmp, seq;
6316 start_sequence ();
6317 /* Forcibly align the stack. */
6318 #ifdef STACK_GROWS_DOWNWARD
6319 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
6320 stack_pointer_rtx, 1, OPTAB_WIDEN);
6321 #else
6322 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6323 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6324 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6325 stack_pointer_rtx, 1, OPTAB_WIDEN);
6326 #endif
6327 if (tmp != stack_pointer_rtx)
6328 emit_move_insn (stack_pointer_rtx, tmp);
6330 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6331 tmp = force_reg (Pmode, const0_rtx);
6332 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6333 seq = gen_sequence ();
6334 end_sequence ();
6336 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6337 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6338 break;
6339 if (tmp)
6340 emit_insn_before (seq, tmp);
6341 else
6342 emit_insn (seq);
6344 #endif
6346 #ifndef HAS_INIT_SECTION
6347 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6348 VOIDmode, 0);
6349 #endif
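/* Worked example of the alignment above, for illustration: with a
   preferred boundary of 128 bits, ALIGN is 16.  On a downward-
   growing stack sp &= -16 rounds down, e.g. 0xbffff4 -> 0xbffff0,
   which is safe because it can only enlarge the frame; on an
   upward-growing stack the CEIL_ROUND form (sp + 15) & -16 rounds
   up instead.  */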
6352 extern struct obstack permanent_obstack;
6354 /* The PENDING_SIZES represent the sizes of variable-sized types.
6355 Create RTL for the various sizes now (using temporary variables),
6356 so that we can refer to the sizes from the RTL we are generating
6357 for the current function. The PENDING_SIZES are a TREE_LIST. The
6358 TREE_VALUE of each node is a SAVE_EXPR. */
6360 void
6361 expand_pending_sizes (pending_sizes)
6362 tree pending_sizes;
6364 tree tem;
6366 /* Evaluate now the sizes of any types declared among the arguments. */
6367 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6369 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6370 /* Flush the queue in case this parameter declaration has
6371 side-effects. */
6372 emit_queue ();
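/* Illustration, not part of this file: the kind of pending size
   this expands.  For

       void f (int n, double a[n][n]);

   the size of the row type double[n] is a SAVE_EXPR involving `n';
   it is queued while the parameters are laid out and evaluated here,
   so the RTL for the body can index `a' through it.  */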
6376 /* Start the RTL for a new function, and set variables used for
6377 emitting RTL.
6378 SUBR is the FUNCTION_DECL node.
6379 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6380 the function's parameters, which must be run at any return statement. */
6382 void
6383 expand_function_start (subr, parms_have_cleanups)
6384 tree subr;
6385 int parms_have_cleanups;
6387 tree tem;
6388 rtx last_ptr = NULL_RTX;
6390 /* Make sure volatile mem refs aren't considered
6391 valid operands of arithmetic insns. */
6392 init_recog_no_volatile ();
6394 current_function_instrument_entry_exit
6395 = (flag_instrument_function_entry_exit
6396 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6398 current_function_profile
6399 = (profile_flag
6400 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6402 current_function_limit_stack
6403 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6405 /* If function gets a static chain arg, store it in the stack frame.
6406 Do this first, so it gets the first stack slot offset. */
6407 if (current_function_needs_context)
6409 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6411 /* Delay copying static chain if it is not a register to avoid
6412 conflicts with regs used for parameters. */
6413 if (! SMALL_REGISTER_CLASSES
6414 || GET_CODE (static_chain_incoming_rtx) == REG)
6415 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6418 /* If the parameters of this function need cleaning up, get a label
6419 for the beginning of the code which executes those cleanups. This must
6420 be done before doing anything with return_label. */
6421 if (parms_have_cleanups)
6422 cleanup_label = gen_label_rtx ();
6423 else
6424 cleanup_label = 0;
6426 /* Make the label for return statements to jump to. Do not special
6427 case machines with special return instructions -- they will be
6428 handled later during jump, ifcvt, or epilogue creation. */
6429 return_label = gen_label_rtx ();
6431 /* Initialize rtx used to return the value. */
6432 /* Do this before assign_parms so that we copy the struct value address
6433 before any library calls that assign parms might generate. */
6435 /* Decide whether to return the value in memory or in a register. */
6436 if (aggregate_value_p (DECL_RESULT (subr)))
6438 /* Returning something that won't go in a register. */
6439 rtx value_address = 0;
6441 #ifdef PCC_STATIC_STRUCT_RETURN
6442 if (current_function_returns_pcc_struct)
6444 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6445 value_address = assemble_static_space (size);
6447 else
6448 #endif
6450 /* Expect to be passed the address of a place to store the value.
6451 If it is passed as an argument, assign_parms will take care of
6452 it. */
6453 if (struct_value_incoming_rtx)
6455 value_address = gen_reg_rtx (Pmode);
6456 emit_move_insn (value_address, struct_value_incoming_rtx);
6459 if (value_address)
6461 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6462 set_mem_attributes (x, DECL_RESULT (subr), 1);
6463 SET_DECL_RTL (DECL_RESULT (subr), x);
6466 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6467 /* If return mode is void, this decl rtl should not be used. */
6468 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6469 else
6471 /* Compute the return values into a pseudo reg, which we will copy
6472 into the true return register after the cleanups are done. */
6474 /* In order to figure out what mode to use for the pseudo, we
6475 figure out what the mode of the eventual return register will
6476 actually be, and use that. */
6477 rtx hard_reg
6478 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6479 subr, 1);
6481 /* Structures that are returned in registers are not aggregate_value_p,
6482 so we may see a PARALLEL. Don't play pseudo games with this. */
6483 if (! REG_P (hard_reg))
6484 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6485 else
6487 /* Create the pseudo. */
6488 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6490 /* Needed because we may need to move this to memory
6491 in case it's a named return value whose address is taken. */
6492 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6496 /* Initialize rtx for parameters and local variables.
6497 In some cases this requires emitting insns. */
6499 assign_parms (subr);
6501 /* Copy the static chain now if it wasn't a register. The delay is to
6502 avoid conflicts with the parameter passing registers. */
6504 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6505 if (GET_CODE (static_chain_incoming_rtx) != REG)
6506 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6508 /* The following was moved from init_function_start.
6509 The move is supposed to make sdb output more accurate. */
6510 /* Indicate the beginning of the function body,
6511 as opposed to parm setup. */
6512 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6514 if (GET_CODE (get_last_insn ()) != NOTE)
6515 emit_note (NULL, NOTE_INSN_DELETED);
6516 parm_birth_insn = get_last_insn ();
6518 context_display = 0;
6519 if (current_function_needs_context)
6521 /* Fetch static chain values for containing functions. */
6522 tem = decl_function_context (current_function_decl);
6523 /* Copy the static chain pointer into a pseudo. If we have
6524 small register classes, copy the value from memory if
6525 static_chain_incoming_rtx is a REG. */
6526 if (tem)
6528 /* If the static chain originally came in a register, put it back
6529 there, then move it out in the next insn. The reason for
6530 this peculiar code is to satisfy function integration. */
6531 if (SMALL_REGISTER_CLASSES
6532 && GET_CODE (static_chain_incoming_rtx) == REG)
6533 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6534 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6537 while (tem)
6539 tree rtlexp = make_node (RTL_EXPR);
6541 RTL_EXPR_RTL (rtlexp) = last_ptr;
6542 context_display = tree_cons (tem, rtlexp, context_display);
6543 tem = decl_function_context (tem);
6544 if (tem == 0)
6545 break;
6546 /* Chain thru stack frames, assuming pointer to next lexical frame
6547 is found at the place we always store it. */
6548 #ifdef FRAME_GROWS_DOWNWARD
6549 last_ptr = plus_constant (last_ptr,
6550 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6551 #endif
6552 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6553 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6554 last_ptr = copy_to_reg (last_ptr);
6556 /* If we are not optimizing, ensure that we know that this
6557 piece of context is live over the entire function. */
6558 if (! optimize)
6559 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6560 save_expr_regs);
6564 if (current_function_instrument_entry_exit)
6566 rtx fun = DECL_RTL (current_function_decl);
6567 if (GET_CODE (fun) == MEM)
6568 fun = XEXP (fun, 0);
6569 else
6570 abort ();
6571 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6572 2, fun, Pmode,
6573 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6575 hard_frame_pointer_rtx),
6576 Pmode);
6579 #ifdef PROFILE_HOOK
6580 if (current_function_profile)
6581 PROFILE_HOOK (profile_label_no);
6582 #endif
6584 /* After the display initializations is where the tail-recursion label
6585 should go, if we end up needing one. Ensure we have a NOTE here
6586 since some things (like trampolines) get placed before this. */
6587 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6589 /* Evaluate now the sizes of any types declared among the arguments. */
6590 expand_pending_sizes (nreverse (get_pending_sizes ()));
6592 /* Make sure there is a line number after the function entry setup code. */
6593 force_next_line_note ();
6596 /* Undo the effects of init_dummy_function_start. */
6597 void
6598 expand_dummy_function_end ()
6600 /* End any sequences that failed to be closed due to syntax errors. */
6601 while (in_sequence_p ())
6602 end_sequence ();
6604 /* Outside function body, can't compute type's actual size
6605 until next function's body starts. */
6607 free_after_parsing (cfun);
6608 free_after_compilation (cfun);
6609 cfun = 0;
6612 /* Call DOIT for each hard register used as a return value from
6613 the current function. */
6615 void
6616 diddle_return_value (doit, arg)
6617 void (*doit) PARAMS ((rtx, void *));
6618 void *arg;
6620 rtx outgoing = current_function_return_rtx;
6622 if (! outgoing)
6623 return;
6625 if (GET_CODE (outgoing) == REG)
6626 (*doit) (outgoing, arg);
6627 else if (GET_CODE (outgoing) == PARALLEL)
6629 int i;
6631 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6633 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6635 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6636 (*doit) (x, arg);
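/* Example, for illustration: a value returned partly in an integer
   and partly in a floating register reaches this function as
   something like

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DF 33) (const_int 8))])

   (register numbers hypothetical), and DOIT is applied to each
   constituent hard register in turn.  */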
6641 static void
6642 do_clobber_return_reg (reg, arg)
6643 rtx reg;
6644 void *arg ATTRIBUTE_UNUSED;
6646 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6649 void
6650 clobber_return_register ()
6652 diddle_return_value (do_clobber_return_reg, NULL);
6654 /* In case we do use pseudo to return value, clobber it too. */
6655 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6657 tree decl_result = DECL_RESULT (current_function_decl);
6658 rtx decl_rtl = DECL_RTL (decl_result);
6659 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6661 do_clobber_return_reg (decl_rtl, NULL);
6666 static void
6667 do_use_return_reg (reg, arg)
6668 rtx reg;
6669 void *arg ATTRIBUTE_UNUSED;
6671 emit_insn (gen_rtx_USE (VOIDmode, reg));
6674 void
6675 use_return_register ()
6677 diddle_return_value (do_use_return_reg, NULL);
6680 /* Generate RTL for the end of the current function.
6681 FILENAME and LINE are the current position in the source file.
6683 It is up to language-specific callers to do cleanups for parameters--
6684 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6686 void
6687 expand_function_end (filename, line, end_bindings)
6688 const char *filename;
6689 int line;
6690 int end_bindings;
6692 tree link;
6693 rtx clobber_after;
6695 #ifdef TRAMPOLINE_TEMPLATE
6696 static rtx initial_trampoline;
6697 #endif
6699 finish_expr_for_function ();
6701 /* If arg_pointer_save_area was referenced only from a nested
6702 function, we will not have initialized it yet. Do that now. */
6703 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6704 get_arg_pointer_save_area (cfun);
6706 #ifdef NON_SAVING_SETJMP
6707 /* Don't put any variables in registers if we call setjmp
6708 on a machine that fails to restore the registers. */
6709 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6711 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6712 setjmp_protect (DECL_INITIAL (current_function_decl));
6714 setjmp_protect_args ();
6716 #endif
6718 /* Initialize any trampolines required by this function. */
6719 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6721 tree function = TREE_PURPOSE (link);
6722 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6723 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6724 #ifdef TRAMPOLINE_TEMPLATE
6725 rtx blktramp;
6726 #endif
6727 rtx seq;
6729 #ifdef TRAMPOLINE_TEMPLATE
6730 /* First make sure this compilation has a template for
6731 initializing trampolines. */
6732 if (initial_trampoline == 0)
6734 initial_trampoline
6735 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6736 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6738 ggc_add_rtx_root (&initial_trampoline, 1);
6740 #endif
6742 /* Generate insns to initialize the trampoline. */
6743 start_sequence ();
6744 tramp = round_trampoline_addr (XEXP (tramp, 0));
6745 #ifdef TRAMPOLINE_TEMPLATE
6746 blktramp = replace_equiv_address (initial_trampoline, tramp);
6747 emit_block_move (blktramp, initial_trampoline,
6748 GEN_INT (TRAMPOLINE_SIZE));
6749 #endif
6750 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6751 seq = get_insns ();
6752 end_sequence ();
6754 /* Put those insns at entry to the containing function (this one). */
6755 emit_insns_before (seq, tail_recursion_reentry);
6758 /* If we are doing stack checking and this function makes calls,
6759 do a stack probe at the start of the function to ensure we have enough
6760 space for another stack frame. */
6761 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6763 rtx insn, seq;
6765 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6766 if (GET_CODE (insn) == CALL_INSN)
6768 start_sequence ();
6769 probe_stack_range (STACK_CHECK_PROTECT,
6770 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6771 seq = get_insns ();
6772 end_sequence ();
6773 emit_insns_before (seq, tail_recursion_reentry);
6774 break;
6778 /* Warn about unused parms if extra warnings were specified. */
6779 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6780 warning. WARN_UNUSED_PARAMETER is negative when set by
6781 -Wunused. */
6782 if (warn_unused_parameter > 0
6783 || (warn_unused_parameter < 0 && extra_warnings))
6785 tree decl;
6787 for (decl = DECL_ARGUMENTS (current_function_decl);
6788 decl; decl = TREE_CHAIN (decl))
6789 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6790 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6791 warning_with_decl (decl, "unused parameter `%s'");
6794 /* Delete handlers for nonlocal gotos if nothing uses them. */
6795 if (nonlocal_goto_handler_slots != 0
6796 && ! current_function_has_nonlocal_label)
6797 delete_handlers ();
6799 /* End any sequences that failed to be closed due to syntax errors. */
6800 while (in_sequence_p ())
6801 end_sequence ();
6803 /* Outside function body, can't compute type's actual size
6804 until next function's body starts. */
6805 immediate_size_expand--;
6807 clear_pending_stack_adjust ();
6808 do_pending_stack_adjust ();
6810 /* Mark the end of the function body.
6811 If control reaches this insn, the function can drop through
6812 without returning a value. */
6813 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6815 /* Must mark the last line number note in the function, so that the test
6816 coverage code can avoid counting the last line twice. This just tells
6817 the code to ignore the immediately following line note, since there
6818 already exists a copy of this note somewhere above. This line number
6819 note is still needed for debugging though, so we can't delete it. */
6820 if (flag_test_coverage)
6821 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6823 /* Output a linenumber for the end of the function.
6824 SDB depends on this. */
6825 emit_line_note_force (filename, line);
6827 /* Before the return label (if any), clobber the return
6828 registers so that they are not propagated live to the rest of
6829 the function. This can only happen with functions that drop
6830 through; if there had been a return statement, there would
6831 have either been a return rtx, or a jump to the return label.
6833 We delay actual code generation after the current_function_value_rtx
6834 is computed. */
6835 clobber_after = get_last_insn ();
6837 /* Output the label for the actual return from the function,
6838 if one is expected. This happens either because a function epilogue
6839 is used instead of a return instruction, or because a return was done
6840 with a goto in order to run local cleanups, or because of pcc-style
6841 structure returning. */
6842 if (return_label)
6843 emit_label (return_label);
6845 /* C++ uses this. */
6846 if (end_bindings)
6847 expand_end_bindings (0, 0, 0);
6849 if (current_function_instrument_entry_exit)
6851 rtx fun = DECL_RTL (current_function_decl);
6852 if (GET_CODE (fun) == MEM)
6853 fun = XEXP (fun, 0);
6854 else
6855 abort ();
6856 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6857 2, fun, Pmode,
6858 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6860 hard_frame_pointer_rtx),
6861 Pmode);
6864 /* Let except.c know where it should emit the call to unregister
6865 the function context for sjlj exceptions. */
6866 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6867 sjlj_emit_function_exit_after (get_last_insn ());
6869 /* If we had calls to alloca, and this machine needs
6870 an accurate stack pointer to exit the function,
6871 insert some code to save and restore the stack pointer. */
6872 #ifdef EXIT_IGNORE_STACK
6873 if (! EXIT_IGNORE_STACK)
6874 #endif
6875 if (current_function_calls_alloca)
6877 rtx tem = 0;
6879 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6880 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6883 /* If scalar return value was computed in a pseudo-reg, or was a named
6884 return value that got dumped to the stack, copy that to the hard
6885 return register. */
6886 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6888 tree decl_result = DECL_RESULT (current_function_decl);
6889 rtx decl_rtl = DECL_RTL (decl_result);
6891 if (REG_P (decl_rtl)
6892 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6893 : DECL_REGISTER (decl_result))
6895 rtx real_decl_rtl;
6897 #ifdef FUNCTION_OUTGOING_VALUE
6898 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
6899 current_function_decl);
6900 #else
6901 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
6902 current_function_decl);
6903 #endif
6904 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
6906 /* If this is a BLKmode structure being returned in registers,
6907 then use the mode computed in expand_return. Note that if
6908 decl_rtl is memory, then its mode may have been changed,
6909 but the mode of current_function_return_rtx has not been. */
6910 if (GET_MODE (real_decl_rtl) == BLKmode)
6911 PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx));
6913 /* If a named return value dumped decl_return to memory, then
6914 we may need to re-do the PROMOTE_MODE signed/unsigned
6915 extension. */
6916 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6918 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6920 #ifdef PROMOTE_FUNCTION_RETURN
6921 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6922 &unsignedp, 1);
6923 #endif
6925 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6927 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6928 emit_group_load (real_decl_rtl, decl_rtl,
6929 int_size_in_bytes (TREE_TYPE (decl_result)));
6930 else
6931 emit_move_insn (real_decl_rtl, decl_rtl);
6933 /* The delay slot scheduler assumes that current_function_return_rtx
6934 holds the hard register containing the return value, not a
6935 temporary pseudo. */
6936 current_function_return_rtx = real_decl_rtl;
6940 /* If returning a structure, arrange to return the address of the value
6941 in a place where debuggers expect to find it.
6943 If returning a structure PCC style,
6944 the caller also depends on this value.
6945 And current_function_returns_pcc_struct is not necessarily set. */
6946 if (current_function_returns_struct
6947 || current_function_returns_pcc_struct)
6949 rtx value_address
6950 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6951 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6952 #ifdef FUNCTION_OUTGOING_VALUE
6953 rtx outgoing
6954 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6955 current_function_decl);
6956 #else
6957 rtx outgoing
6958 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
6959 #endif
6961 /* Mark this as a function return value so integrate will delete the
6962 assignment and USE below when inlining this function. */
6963 REG_FUNCTION_VALUE_P (outgoing) = 1;
6965 #ifdef POINTERS_EXTEND_UNSIGNED
6966 /* The address may be ptr_mode and OUTGOING may be Pmode. */
6967 if (GET_MODE (outgoing) != GET_MODE (value_address))
6968 value_address = convert_memory_address (GET_MODE (outgoing),
6969 value_address);
6970 #endif
6972 emit_move_insn (outgoing, value_address);
6974 /* Show return register used to hold result (in this case the address
6975 of the result). */
6976 current_function_return_rtx = outgoing;
6979 /* If this is an implementation of throw, do what's necessary to
6980 communicate between __builtin_eh_return and the epilogue. */
6981 expand_eh_return ();
6983 /* Emit the actual code to clobber return register. */
6985 rtx seq, after;
6987 start_sequence ();
6988 clobber_return_register ();
6989 seq = gen_sequence ();
6990 end_sequence ();
6992 after = emit_insn_after (seq, clobber_after);
6994 if (clobber_after != after)
6995 cfun->x_clobber_return_insn = after;
6998 /* ??? This should no longer be necessary since stupid is no longer with
6999 us, but there are some parts of the compiler (e.g. reload_combine, and
7000 sh mach_dep_reorg) that still try to compute their own lifetime info
7001 instead of using the general framework. */
7002 use_return_register ();
7004 /* Fix up any gotos that jumped out to the outermost
7005 binding level of the function.
7006 Must follow emitting RETURN_LABEL. */
7008 /* If you have any cleanups to do at this point,
7009 and they need to create temporary variables,
7010 then you will lose. */
7011 expand_fixups (get_insns ());
7014 rtx
7015 get_arg_pointer_save_area (f)
7016 struct function *f;
7018 rtx ret = f->x_arg_pointer_save_area;
7020 if (! ret)
7022 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7023 f->x_arg_pointer_save_area = ret;
7026 if (f == cfun && ! f->arg_pointer_save_area_init)
7028 rtx seq;
7030 /* Save the arg pointer at the beginning of the function. The
7031 generated stack slot may not be a valid memory address, so we
7032 have to check it and fix it if necessary. */
7033 start_sequence ();
7034 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7035 seq = gen_sequence ();
7036 end_sequence ();
7038 push_topmost_sequence ();
7039 emit_insn_after (seq, get_insns ());
7040 pop_topmost_sequence ();
7043 return ret;
7046 /* Extend a vector that records the INSN_UIDs of INSNS (either a
7047 sequence or a single insn). */
7049 static void
7050 record_insns (insns, vecp)
7051 rtx insns;
7052 varray_type *vecp;
7054 if (GET_CODE (insns) == SEQUENCE)
7056 int len = XVECLEN (insns, 0);
7057 int i = VARRAY_SIZE (*vecp);
7059 VARRAY_GROW (*vecp, i + len);
7060 while (--len >= 0)
7062 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
7063 ++i;
7066 else
7068 int i = VARRAY_SIZE (*vecp);
7069 VARRAY_GROW (*vecp, i + 1);
7070 VARRAY_INT (*vecp, i) = INSN_UID (insns);
7074 /* Determine how many INSN_UIDs in VEC are part of INSN. */
7076 static int
7077 contains (insn, vec)
7078 rtx insn;
7079 varray_type vec;
7081 int i, j;
7083 if (GET_CODE (insn) == INSN
7084 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7086 int count = 0;
7087 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7088 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7089 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7090 count++;
7091 return count;
7093 else
7095 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7096 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7097 return 1;
7099 return 0;
7102 int
7103 prologue_epilogue_contains (insn)
7104 rtx insn;
7106 if (contains (insn, prologue))
7107 return 1;
7108 if (contains (insn, epilogue))
7109 return 1;
7110 return 0;
7113 int
7114 sibcall_epilogue_contains (insn)
7115 rtx insn;
7117 if (sibcall_epilogue)
7118 return contains (insn, sibcall_epilogue);
7119 return 0;
7122 #ifdef HAVE_return
7123 /* Insert gen_return at the end of block BB. This also means updating
7124 block_for_insn appropriately. */
7126 static void
7127 emit_return_into_block (bb, line_note)
7128 basic_block bb;
7129 rtx line_note;
7131 rtx p, end;
7133 p = NEXT_INSN (bb->end);
7134 end = emit_jump_insn_after (gen_return (), bb->end);
7135 if (line_note)
7136 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7137 NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7139 #endif /* HAVE_return */
7141 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7143 /* These functions convert the epilogue into a variant that does not modify the
7144 stack pointer. This is used in cases where a function returns an object
7145 whose size is not known until it is computed. The called function leaves the
7146 object on the stack, leaves the stack depressed, and returns a pointer to
7147 the object.
7149 What we need to do is track all modifications and references to the stack
7150 pointer, deleting the modifications and changing the references to point to
7151 the location the stack pointer would have pointed to had the modifications
7152 taken place.
7154 These functions need to be portable so we need to make as few assumptions
7155 about the epilogue as we can. However, the epilogue basically contains
7156 three things: instructions to reset the stack pointer, instructions to
7157 reload registers, possibly including the frame pointer, and an
7158 instruction to return to the caller.
7160 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7161 We also make no attempt to validate the insns we make since if they are
7162 invalid, we probably can't do anything valid. The intent is that these
7163 routines get "smarter" as more and more machines start to use them and
7164 they try operating on different epilogues.
7166 We use the following structure to track what the part of the epilogue that
7167 we've already processed has done. We keep two copies of the SP equivalence,
7168 one for use during the insn we are processing and one for use in the next
7169 insn. The difference is because one part of a PARALLEL may adjust SP
7170 and the other may use it. */
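/* Worked example of the tracking below, for illustration: given an
   epilogue of the shape

       (set (reg sp) (plus (reg sp) (const_int 16)))
       (set (reg fp) (mem (plus (reg sp) (const_int 8))))
       (return)

   the first SET only updates the records, leaving sp_equiv_reg = sp
   and sp_offset = 16; since that adjustment is never emitted, the
   frame-pointer reload is rewritten to load from (plus (reg sp)
   (const_int 24)); and the RETURN becomes an indirect jump through
   the (likewise readdressed) return-address slot.  */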
7172 struct epi_info
7174 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7175 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7176 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7177 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7178 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7179 should be set to once we no longer need
7180 its value. */
7183 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7184 static void emit_equiv_load PARAMS ((struct epi_info *));
7186 /* Modify SEQ, a SEQUENCE that is part of the epilogue, so that it makes
7187 no modifications to the stack pointer. Return the new sequence. */
7189 static rtx
7190 keep_stack_depressed (seq)
7191 rtx seq;
7193 int i, j;
7194 struct epi_info info;
7196 /* If the epilogue is just a single instruction, it must be OK as is. */
7198 if (GET_CODE (seq) != SEQUENCE)
7199 return seq;
7201 /* Otherwise, start a sequence, initialize the information we have, and
7202 process all the insns we were given. */
7203 start_sequence ();
7205 info.sp_equiv_reg = stack_pointer_rtx;
7206 info.sp_offset = 0;
7207 info.equiv_reg_src = 0;
7209 for (i = 0; i < XVECLEN (seq, 0); i++)
7211 rtx insn = XVECEXP (seq, 0, i);
7213 if (!INSN_P (insn))
7215 add_insn (insn);
7216 continue;
7219 /* If this insn references the register that SP is equivalent to and
7220 we have a pending load to that register, we must force out the load
7221 first and then indicate we no longer know what SP's equivalent is. */
7222 if (info.equiv_reg_src != 0
7223 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7225 emit_equiv_load (&info);
7226 info.sp_equiv_reg = 0;
7229 info.new_sp_equiv_reg = info.sp_equiv_reg;
7230 info.new_sp_offset = info.sp_offset;
      /* If this is a (RETURN) and the return address is on the stack,
         update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
        {
          rtx retaddr = INCOMING_RETURN_ADDR_RTX;
          rtx base = 0;
          HOST_WIDE_INT offset = 0;
          rtx jump_insn, jump_set;

          /* If the return address is in a register, we can emit the insn
             unchanged.  Otherwise, it must be a MEM and we see what the
             base register and offset are.  In any case, we have to emit any
             pending load to the equivalent reg of SP, if any.  */
          if (GET_CODE (retaddr) == REG)
            {
              emit_equiv_load (&info);
              add_insn (insn);
              continue;
            }
          else if (GET_CODE (retaddr) == MEM
                   && GET_CODE (XEXP (retaddr, 0)) == REG)
            base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
          else if (GET_CODE (retaddr) == MEM
                   && GET_CODE (XEXP (retaddr, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
                   && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
            {
              base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
              offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
            }
          else
            abort ();

          /* If the base of the location containing the return pointer
             is SP, we must update it with the replacement address.  Otherwise,
             just build the necessary MEM.  */
          retaddr = plus_constant (base, offset);
          if (base == stack_pointer_rtx)
            retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
                                            plus_constant (info.sp_equiv_reg,
                                                           info.sp_offset));

          retaddr = gen_rtx_MEM (Pmode, retaddr);

          /* If there is a pending load to the equivalent register for SP
             and we reference that register, we must load our address into
             a scratch register and then do that load.  */
          if (info.equiv_reg_src
              && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
            {
              unsigned int regno;
              rtx reg;

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (HARD_REGNO_MODE_OK (regno, Pmode)
                    && !fixed_regs[regno]
                    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
                    && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
                                         regno)
                    && !refers_to_regno_p (regno,
                                           regno + HARD_REGNO_NREGS (regno,
                                                                     Pmode),
                                           info.equiv_reg_src, NULL))
                  break;

              if (regno == FIRST_PSEUDO_REGISTER)
                abort ();

              reg = gen_rtx_REG (Pmode, regno);
              emit_move_insn (reg, retaddr);
              retaddr = reg;
            }

          emit_equiv_load (&info);
          jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

          /* Show the SET in the above insn is a RETURN.  */
          jump_set = single_set (jump_insn);
          if (jump_set == 0)
            abort ();
          else
            SET_IS_RETURN_P (jump_set) = 1;
        }

      /* If SP is not mentioned in the pattern and its equivalent register, if
         any, is not modified, just emit it.  Otherwise, if neither is set,
         replace the reference to SP and emit the insn.  If none of those are
         true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
               && (info.sp_equiv_reg == stack_pointer_rtx
                   || !reg_set_p (info.sp_equiv_reg, insn)))
        add_insn (insn);
      else if (! reg_set_p (stack_pointer_rtx, insn)
               && (info.sp_equiv_reg == stack_pointer_rtx
                   || !reg_set_p (info.sp_equiv_reg, insn)))
        {
          if (! validate_replace_rtx (stack_pointer_rtx,
                                      plus_constant (info.sp_equiv_reg,
                                                     info.sp_offset),
                                      insn))
            abort ();

          add_insn (insn);
        }
      else if (GET_CODE (PATTERN (insn)) == SET)
        handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        {
          for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
            if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
              handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
        }
      else
        add_insn (insn);

      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;
    }

  seq = gen_sequence ();
  end_sequence ();
  return seq;
}
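
/* As a concrete illustration of the transformation above (hypothetical
   target; the exact form of INCOMING_RETURN_ADDR_RTX and the modes are
   machine-dependent): suppose the return address lives at
   (mem (plus (reg sp) (const_int 4))) and the epilogue has established
   that SP is equivalent to (plus (reg fp) (const_int -8)).  Then the
   (return) is rewritten into an indirect jump through

     (mem (plus (reg fp) (const_int -4)))

   with SET_IS_RETURN_P set on the jump's SET, going through a
   call-clobbered scratch register first only when a pending load of SP's
   equivalent register overlaps that address.  */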
/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (set, p)
     rtx set;
     struct epi_info *p;
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
        abort ();

      if (GET_CODE (SET_SRC (set)) == PLUS
          && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
        {
          p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
          p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
        }
      else
        p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
        {
          p->new_sp_equiv_reg = p->sp_equiv_reg;
          p->new_sp_offset += p->sp_offset;
        }

      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
        abort ();

      return;
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
          || p->equiv_reg_src != 0)
        abort ();
      else
        p->equiv_reg_src
          = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                  plus_constant (p->sp_equiv_reg,
                                                 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn with its new
     value and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                            plus_constant (p->sp_equiv_reg,
                                                           p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
                                             plus_constant (p->sp_equiv_reg,
                                                            p->sp_offset));
      emit_insn (set);
    }
}
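
/* For example (illustrative only; "r3" stands in for some hard register):
   given a current equivalence of SP = (plus (reg fp) (const_int 16)),
   processing

     (set (reg sp) (plus (reg fp) (const_int 24)))

   merely records FP as the new equivalent register with offset 24 and
   emits nothing, while

     (set (reg r3) (mem (plus (reg sp) (const_int 8))))

   is rewritten to

     (set (reg r3) (mem (plus (reg fp) (const_int 24))))

   and emitted.  */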
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (p)
     struct epi_info *p;
{
  if (p->equiv_reg_src != 0)
    emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);

  p->equiv_reg_src = 0;
}
#endif

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined (HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      record_insns (seq, &prologue);
      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      seq = gen_sequence ();
      end_sequence ();

      /* Can't deal with multiple successors of the entry block
         at the moment.  Function should always have at least one
         entry point.  */
      if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
        abort ();

      insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
      inserted = 1;
    }
#endif
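
/* The code above is the usual idiom for injecting insns on a CFG edge
   (sketch only; this mirrors the calls already made above):

     start_sequence ();
     emit_insn (gen_prologue ());
     ...
     seq = gen_sequence ();
     end_sequence ();
     insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);

   Nothing is actually placed in the insn stream until
   commit_edge_insertions is called once all edges have been annotated;
   see epilogue_done below.  */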
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
         then by definition we don't need a full epilogue.  Examine
         the block that falls through to EXIT.  If it does not
         contain any code, examine its predecessors and try to
         emit (conditional) return instructions.  */

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = last->end;
      while (label && GET_CODE (label) != CODE_LABEL)
        {
          if (active_insn_p (label))
            break;
          label = PREV_INSN (label);
        }

      if (last->head == label && GET_CODE (label) == CODE_LABEL)
        {
          rtx epilogue_line_note = NULL_RTX;

          /* Locate the line number associated with the closing brace,
             if we can find one.  */
          for (seq = get_last_insn ();
               seq && ! active_insn_p (seq);
               seq = PREV_INSN (seq))
            if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
              {
                epilogue_line_note = seq;
                break;
              }

          for (e = last->pred; e; e = e_next)
            {
              basic_block bb = e->src;
              rtx jump;

              e_next = e->pred_next;
              if (bb == ENTRY_BLOCK_PTR)
                continue;

              jump = bb->end;
              if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
                continue;

              /* If we have an unconditional jump, we can replace that
                 with a simple return instruction.  */
              if (simplejump_p (jump))
                {
                  emit_return_into_block (bb, epilogue_line_note);
                  delete_insn (jump);
                }

              /* If we have a conditional jump, we can try to replace
                 that with a conditional return instruction.  */
              else if (condjump_p (jump))
                {
                  rtx ret, *loc;

                  ret = SET_SRC (PATTERN (jump));
                  if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
                    loc = &XEXP (ret, 1);
                  else
                    loc = &XEXP (ret, 2);
                  ret = gen_rtx_RETURN (VOIDmode);

                  if (! validate_change (jump, loc, ret, 0))
                    continue;
                  if (JUMP_LABEL (jump))
                    LABEL_NUSES (JUMP_LABEL (jump))--;

                  /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete the edge.  */
                  if (bb->succ->succ_next == NULL)
                    continue;
                }
              else
                continue;

              /* Fix up the CFG for the successful change we just made.  */
              redirect_edge_succ (e, EXIT_BLOCK_PTR);
            }

          /* Emit a return insn for the exit fallthru block.  Whether
             this is still reachable will be determined later.  */

          emit_barrier_after (last->end);
          emit_return_into_block (last, epilogue_line_note);
          epilogue_end = last->end;
          last->succ->flags &= ~EDGE_FALLTHRU;
          goto epilogue_done;
        }
    }
#endif
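
/* Concretely (illustrative RTL; the condition and modes are made up):
   an unconditional jump to the empty exit fallthru block,

     (jump_insn (set (pc) (label_ref L)))

   is deleted and replaced by a (return) insn in its block, while a
   conditional jump such as

     (jump_insn (set (pc) (if_then_else (eq (reg cc) (const_int 0))
                                        (label_ref L)
                                        (pc))))

   has its label_ref swapped for (return) via validate_change, so the
   branch returns directly when taken.  */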
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
         due to RETURN instructions, but those don't need epilogues.
         There really shouldn't be a mixture -- either all should have
         been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;

      start_sequence ();
      epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
         it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
          && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
        seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      record_insns (seq, &epilogue);

      seq = gen_sequence ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
#endif
epilogue_done:

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = bb->end;
      rtx i;
      rtx newinsn;

      if (GET_CODE (insn) != CALL_INSN
          || ! SIBLING_CALL_P (insn))
        continue;

      start_sequence ();
      seq = gen_sibcall_epilogue ();
      end_sequence ();

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);

      /* Retain a map of the epilogue insns.  Used in life analysis to
         avoid getting rid of sibcall epilogue insns.  */
      record_insns (GET_CODE (seq) == SEQUENCE
                    ? seq : newinsn, &sibcall_epilogue);
    }
#endif

#ifdef HAVE_prologue
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
         line note after the prologue.  Which means (1) that if
         there are line number notes before where we inserted the
         prologue we should move them, and (2) we should generate a
         note before the end of the first basic block, if there isn't
         one already there.

         ??? This behaviour is completely broken when dealing with
         multiple entry functions.  We simply always place the note
         into the first basic block, which lets alternate entry
         points be missed.  */

      for (insn = prologue_end; insn; insn = prev)
        {
          prev = PREV_INSN (insn);
          if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
            {
              /* Note that we cannot reorder the first insn in the
                 chain, since rest_of_compilation relies on that
                 remaining constant.  */
              if (prev == NULL)
                break;
              reorder_insns (insn, insn, prologue_end);
            }
        }

      /* Find the last line number note in the first block.  */
      for (insn = BASIC_BLOCK (0)->end;
           insn != prologue_end && insn;
           insn = PREV_INSN (insn))
        if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
          break;

      /* If we didn't find one, make a copy of the first line number
         we run across.  */
      if (! insn)
        {
          for (insn = next_active_insn (prologue_end);
               insn;
               insn = PREV_INSN (insn))
            if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
              {
                emit_line_note_after (NOTE_SOURCE_FILE (insn),
                                      NOTE_LINE_NUMBER (insn),
                                      prologue_end);
                break;
              }
        }
    }
#endif
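
/* In stream terms, the line-note fixup above performs (schematic; LN is
   a source line number note):

     before:  LN  (prologue insns)  NOTE_INSN_PROLOGUE_END  (body)
     after:       (prologue insns)  NOTE_INSN_PROLOGUE_END  LN  (body)

   so that the first line note GDB finds after the prologue corresponds
   to the function body rather than to something emitted before the
   prologue was inserted.  */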
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      rtx insn, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = f; len && insn; insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == NOTE)
            {
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if ((len -= contains (insn, prologue)) == 0)
            {
              rtx next;
              /* Find the prologue-end note if we haven't already, and
                 move it to just after the last prologue insn.  */
              if (note == 0)
                {
                  for (note = insn; (note = NEXT_INSN (note));)
                    if (GET_CODE (note) == NOTE
                        && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                      break;
                }

              next = NEXT_INSN (note);

              /* Whether or not we can depend on BLOCK_HEAD,
                 attempt to keep it up-to-date.  */
              if (BLOCK_HEAD (0) == note)
                BLOCK_HEAD (0) = next;

              remove_insn (note);
              /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
              if (GET_CODE (insn) == CODE_LABEL)
                insn = NEXT_INSN (insn);
              add_insn_after (note, insn);
            }
        }
    }

  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      rtx insn, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
        {
          if (GET_CODE (insn) == NOTE)
            {
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                note = insn;
            }
          else if ((len -= contains (insn, epilogue)) == 0)
            {
              /* Find the epilogue-begin note if we haven't already, and
                 move it to just before the first epilogue insn.  */
              if (note == 0)
                {
                  for (note = insn; (note = PREV_INSN (note));)
                    if (GET_CODE (note) == NOTE
                        && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                      break;
                }

              /* Whether or not we can depend on BLOCK_HEAD,
                 attempt to keep it up-to-date.  */
              if (n_basic_blocks
                  && BLOCK_HEAD (n_basic_blocks - 1) == insn)
                BLOCK_HEAD (n_basic_blocks - 1) = note;

              remove_insn (note);
              add_insn_before (note, insn);
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
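
/* For instance (schematic; p1 and p2 are insns recorded in the prologue
   map), if scheduling interleaved a body insn with the prologue,

     p1  NOTE_INSN_PROLOGUE_END  body-insn  p2

   the forward scan above consumes prologue insns until the map is
   exhausted at p2 and then moves the note to just after it:

     p1  body-insn  p2  NOTE_INSN_PROLOGUE_END

   The epilogue-begin note is handled symmetrically, scanning backward
   from the end of the insn chain.  */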
/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  struct var_refs_queue *q;
  struct temp_slot *t;
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
         i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);
  ggc_mark_rtx (p->x_clobber_return_insn);

  for (t = p->x_temp_slots; t != 0; t = t->next)
    {
      ggc_mark (t);
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);
      ggc_mark_tree (t->type);
    }

  for (q = p->fixup_var_refs_queue; q != 0; q = q->next)
    {
      ggc_mark (q);
      ggc_mark_rtx (q->modified);
    }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);

  mark_hard_reg_initial_vals (p);
}

/* Mark the struct function pointed to by *ARG for GC, if it is not
   NULL.  This is used to mark the current function and the outer
   function chain.  */

static void
maybe_mark_struct_function (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  if (f == 0)
    return;

  ggc_mark_struct_function (f);
}

/* Mark a struct function * for GC.  This is called from ggc-common.c.  */

void
ggc_mark_struct_function (f)
     struct function *f;
{
  ggc_mark (f);
  ggc_mark_tree (f->decl);

  mark_function_status (f);
  mark_eh_status (f->eh);
  mark_stmt_status (f->stmt);
  mark_expr_status (f->expr);
  mark_emit_status (f->emit);
  mark_varasm_status (f->varasm);

  if (mark_machine_status)
    (*mark_machine_status) (f);
  if (mark_lang_status)
    (*mark_lang_status) (f);

  if (f->original_arg_vector)
    ggc_mark_rtvec ((rtvec) f->original_arg_vector);
  if (f->original_decl_initial)
    ggc_mark_tree (f->original_decl_initial);
  if (f->outer)
    ggc_mark_struct_function (f->outer);
}

/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&cfun, 1, sizeof cfun, maybe_mark_struct_function);
  ggc_add_root (&outer_function_chain, 1, sizeof outer_function_chain,
                maybe_mark_struct_function);

  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}