/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest integer not greater than it that is
   a multiple of the required alignment.  Avoid using division in case
   the value is negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
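
/* Worked example (editorial note, not part of the original source):
   with ALIGN == 8 and VALUE == 13, FLOOR_ROUND (13, 8) yields
   13 & ~7 == 8, while CEIL_ROUND (13, 8) yields (13 + 7) & ~7 == 16.
   Both reduce to bit masking, which is why ALIGN must be a power
   of two.  */
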
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */

int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));
/* This variable holds a pointer to a function to register any
   data items in the target specific, per-function data structure
   that will need garbage collection.  */
void (*mark_machine_status) PARAMS ((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PARAMS ((struct function *));
void (*save_lang_status) PARAMS ((struct function *));
void (*restore_lang_status) PARAMS ((struct function *));
void (*mark_lang_status) PARAMS ((struct function *));
void (*free_lang_status) PARAMS ((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static varray_type prologue;
static varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static varray_type sibcall_epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
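
/* A minimal lifecycle sketch (editorial note, not part of the original
   source), using the entry points defined later in this file:

	push_temp_slots ();
	tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
	...emit code that computes into TMP...
	preserve_temp_slots (result);
	pop_temp_slots ();

   preserve_temp_slots is only needed when a ({...}) grouping may leave
   its result in a temporary; pop_temp_slots frees everything else
   allocated at the level being exited, and free_temp_slots is the
   per-statement variant.  */
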
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
					enum machine_mode, enum machine_mode,
					int, unsigned int, int,
					struct hash_table *));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
					     enum machine_mode,
					     struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
				    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
					  int, int));
static void fixup_var_refs_insns_with_hash
  PARAMS ((struct hash_table *, rtx,
	   enum machine_mode, int));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
					 int, int));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
				      struct fixup_replacement **));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
			       tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
				       struct hash_table *));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
							 struct hash_table *,
							 hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_function_status PARAMS ((struct function *));
static void maybe_mark_struct_function PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));

/* Pointer to chain of `struct function' for containing functions.  */
static struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
	cfun->contains_functions = 1;
      else
	{
	  struct function *containing = find_function_data (context);
	  containing->contains_functions = 1;
	}
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  if (save_lang_status)
    (*save_lang_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
		    queue->unsignedp, 0);

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;
      else
	alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
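
/* Usage sketch (editorial note, not part of the original source): a
   caller needing a word-sized, default-aligned slot in the current
   function's frame would write

	rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   where ALIGN == 0 requests the natural alignment of SImode, as
   described above assign_stack_local_1.  */
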
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& objects_must_conflict_p (p->type, type)
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->type = best_p->type;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_VOLATILE_P (p->slot) = 0;
  set_mem_alias_set (p->slot, 0);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
  set_mem_align (p->slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
      MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
    }

  return p->slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  enum machine_mode mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
	size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
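
/* Usage sketch (editorial note, not part of the original source):
   expanding a call that returns a struct in memory might do

	rtx target = assign_temp (TREE_TYPE (exp), 1, 1, 1);

   i.e. KEEP == 1, MEMORY_REQUIRED == 1 and DONT_PROMOTE == 1, which
   forces an addressable stack slot even when the type would otherwise
   fit in a register.  */
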
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (GET_CODE (new) == REG)
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
	/* If this slot is below the current TEMP_SLOT_LEVEL, then it
	   needs to be preserved.  This can happen if a temporary in
	   the RTL_EXPR was addressed; preserve_temp_slots will move
	   the temporary into a higher level.  */
	if (temp_slot_level <= p->level)
	  p->in_use = 0;
	else
	  p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
	       || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
	 ? SAVE_EXPR_RTL (decl)
	 : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
			    decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.
	 We fix up references to the parts only after we fix up references
	 to the whole CONCAT, lest we do double fixups for the latter
	 references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = type_for_mode (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
	 already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
	SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
	SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
	{
	  schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
				   promoted_mode, 0);
	  schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
	  schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
	}
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, VOIDmode,
		       3, XEXP (reg, 0), Pmode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
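
/* Usage sketch (editorial note, not part of the original source): a
   front end that discovers `&x' only after `x' has been expanded into
   a pseudo-register would, from its mark_addressable routine, do
   something like

	if (DECL_RTL_SET_P (decl) && GET_CODE (DECL_RTL (decl)) == REG)
	  put_var_into_stack (decl);

   after which DECL_RTL is a MEM and every insn emitted so far has been
   rewritten to reference the new stack slot.  */
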
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
			   AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}

/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     struct hash_table *ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
	= (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
}

static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
	abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
			stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
			    stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
	  end_sequence ();
	}
    }
}

/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries
   and X is some part of an insn.  Return a struct fixup_replacement whose
   OLD value is equal to X.  Allocate a new structure if no such entry
   exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
	 pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
	 the three sequences they (potentially) contain, and process
	 them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  int i;

	  /* Look at the Normal call, sibling call and tail recursion
	     sequences attached to the CALL_PLACEHOLDER.  */
	  for (i = 0; i < 3; i++)
	    {
	      rtx seq = XEXP (PATTERN (insn), i);
	      if (seq)
		{
		  push_to_sequence (seq);
		  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
		  XEXP (PATTERN (insn), i) = get_insns ();
		  end_sequence ();
		}
	    }
	}
      else if (INSN_P (insn))
	fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel);

      insn = next;
    }
}

/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp)
     struct hash_table *ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  struct insns_for_mem_entry *ime = (struct insns_for_mem_entry *)
    hash_lookup (ht, var, /*create=*/0, /*copy=*/0);
  rtx insn_list = ime->insns;

  while (insn_list)
    {
      rtx insn = XEXP (insn_list, 0);

      if (INSN_P (insn))
	fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, 1);

      insn_list = XEXP (insn_list, 1);
    }
}

/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */

static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
	  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
	      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
		  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
	/* The REG_LIBCALL note will go away since we are going to
	   turn INSN into a NOTE, so just delete the
	   corresponding REG_RETVAL note.  */
	remove_note (XEXP (note, 0),
		     find_reg_note (XEXP (note, 0), REG_RETVAL,
				    NULL_RTX));

      delete_insn (insn);
    }

  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
	   && (set = single_set (insn)) != 0
	   && SET_DEST (set) == var
	   /* If this represents the result of an insn group,
	      don't delete the insn.  */
	   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
	   && (rtx_equal_p (SET_SRC (set), var)
	       || (GET_CODE (SET_SRC (set)) == REG
		   && (prev = prev_nonnote_insn (insn)) != 0
		   && (prev_set = single_set (prev)) != 0
		   && SET_DEST (prev_set) == SET_SRC (set)
		   && rtx_equal_p (SET_SRC (prev_set), var))))
    {
      delete_insn (insn);
    }
  else
    {
      struct fixup_replacement *replacements = 0;
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
	{
	  /* If the insn that copies the results of a CALL_INSN
	     into a pseudo now references VAR, we have to use an
	     intermediate pseudo since we want the life of the
	     return value register to be only a single insn.

	     If we don't use an intermediate pseudo, such things as
	     address computations to make the address of VAR valid
	     if it is not can be placed between the CALL_INSN and INSN.

	     To make sure this doesn't happen, we record the destination
	     of the CALL_INSN and see if the next insn uses both that
	     and VAR.  */

	  if (call_dest != 0 && GET_CODE (insn) == INSN
	      && reg_mentioned_p (var, PATTERN (insn))
	      && reg_mentioned_p (call_dest, PATTERN (insn)))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

	      emit_insn_before (gen_move_insn (temp, call_dest), insn);

	      PATTERN (insn) = replace_rtx (PATTERN (insn),
					    call_dest, temp);
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == SET)
	    call_dest = SET_DEST (PATTERN (insn));
	  else if (GET_CODE (insn) == CALL_INSN
		   && GET_CODE (PATTERN (insn)) == PARALLEL
		   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	  else
	    call_dest = 0;
	}

      /* See if we have to do anything to INSN now that VAR is in
	 memory.  If it needs to be loaded into a pseudo, use a single
	 pseudo for the entire insn in case there is a MATCH_DUP
	 between two operands.  We pass a pointer to the head of
	 a list of struct fixup_replacements.  If fixup_var_refs_1
	 needs to allocate pseudos or replacement MEMs (for SUBREGs),
	 it will record them in this list.

	 If it allocated a pseudo for any replacement, we copy into
	 it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
			&replacements);

      /* If this is last_parm_insn, and any instructions were output
	 after it to fix it up, then we must set last_parm_insn to
	 the last such instruction emitted.  */
      if (insn == last_parm_insn)
	last_parm_insn = PREV_INSN (next_insn);

      while (replacements)
	{
	  struct fixup_replacement *next;

	  if (GET_CODE (replacements->new) == REG)
	    {
	      rtx insert_before;
	      rtx seq;

	      /* OLD might be a (subreg (mem)).  */
	      if (GET_CODE (replacements->old) == SUBREG)
		replacements->old
		  = fixup_memory_subreg (replacements->old, insn, 0);
	      else
		replacements->old
		  = fixup_stack_1 (replacements->old, insn);

	      insert_before = insn;

	      /* If we are changing the mode, do a conversion.
		 This might be wasteful, but combine.c will
		 eliminate much of the waste.  */

	      if (GET_MODE (replacements->new)
		  != GET_MODE (replacements->old))
		{
		  start_sequence ();
		  convert_move (replacements->new,
				replacements->old, unsignedp);
		  seq = gen_sequence ();
		  end_sequence ();
		}
	      else
		seq = gen_move_insn (replacements->new,
				     replacements->old);

	      emit_insn_before (seq, insert_before);
	    }

	  next = replacements->next;
	  free (replacements);
	  replacements = next;
	}
    }

  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
     But don't touch other insns referred to by reg-notes;
     we will get them elsewhere.  */
  while (note)
    {
      if (GET_CODE (note) != INSN_LIST)
	XEXP (note, 0)
	  = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
      note = XEXP (note, 1);
    }
}

1868 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1869 See if the rtx expression at *LOC in INSN needs to be changed.
1871 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1872 contain a list of original rtx's and replacements. If we find that we need
1873 to modify this insn by replacing a memory reference with a pseudo or by
1874 making a new MEM to implement a SUBREG, we consult that list to see if
1875 we have already chosen a replacement. If none has already been allocated,
1876 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1877 or the SUBREG, as appropriate, to the pseudo. */
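/* An illustrative sketch of one fixup (pseudo numbers hypothetical):
   with VAR = (mem:SI (plus (frame-pointer) (const_int -8))), an insn

       (set (reg:SI 60) (plus:SI VAR VAR))

   whose add pattern allows only one memory operand has both identical
   occurrences of VAR replaced by a single new pseudo, and the caller
   then emits a load of VAR into that pseudo just before the insn. */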
1879 static void
1880 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1881 rtx var;
1882 enum machine_mode promoted_mode;
1883 rtx *loc;
1884 rtx insn;
1885 struct fixup_replacement **replacements;
1887 int i;
1888 rtx x = *loc;
1889 RTX_CODE code = GET_CODE (x);
1890 const char *fmt;
1891 rtx tem, tem1;
1892 struct fixup_replacement *replacement;
1894 switch (code)
1896 case ADDRESSOF:
1897 if (XEXP (x, 0) == var)
1899 /* Prevent sharing of rtl that might lose. */
1900 rtx sub = copy_rtx (XEXP (var, 0));
1902 if (! validate_change (insn, loc, sub, 0))
1904 rtx y = gen_reg_rtx (GET_MODE (sub));
1905 rtx seq, new_insn;
1907 /* We should be able to replace with a register or all is lost.
1908 Note that we can't use validate_change to verify this, since
1909 we're not trying to replace all duplicates simultaneously. */
1910 if (! validate_replace_rtx (*loc, y, insn))
1911 abort ();
1913 /* Careful! First try to recognize a direct move of the
1914 value, mimicking how things are done in gen_reload wrt
1915 PLUS. Consider what happens when insn is a conditional
1916 move instruction and addsi3 clobbers flags. */
1918 start_sequence ();
1919 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1920 seq = gen_sequence ();
1921 end_sequence ();
1923 if (recog_memoized (new_insn) < 0)
1925 /* That failed. Fall back on force_operand and hope. */
1927 start_sequence ();
1928 sub = force_operand (sub, y);
1929 if (sub != y)
1930 emit_insn (gen_move_insn (y, sub));
1931 seq = gen_sequence ();
1932 end_sequence ();
1935 #ifdef HAVE_cc0
1936 /* Don't separate setter from user. */
1937 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1938 insn = PREV_INSN (insn);
1939 #endif
1941 emit_insn_before (seq, insn);
1944 return;
1946 case MEM:
1947 if (var == x)
1949 /* If we already have a replacement, use it. Otherwise,
1950 try to fix up this address in case it is invalid. */
1952 replacement = find_fixup_replacement (replacements, var);
1953 if (replacement->new)
1955 *loc = replacement->new;
1956 return;
1959 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1961 /* Unless we are forcing memory to register or we changed the mode,
1962 we can leave things the way they are if the insn is valid. */
1964 INSN_CODE (insn) = -1;
1965 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1966 && recog_memoized (insn) >= 0)
1967 return;
1969 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1970 return;
1973 /* If X contains VAR, we need to unshare it here so that we update
1974 each occurrence separately. But all identical MEMs in one insn
1975 must be replaced with the same rtx because of the possibility of
1976 MATCH_DUPs. */
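/* For example (illustrative only): in (set (mem:SI A) (mem:SI A)),
   where both MEMs are one shared rtx containing VAR, copy_most_rtx
   gives this occurrence its own copy (still sharing VAR itself) so
   each reference can be rewritten independently, while keying the
   replacement list on X keeps truly identical MEMs within a single
   insn mapped to one and the same replacement. */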
1978 if (reg_mentioned_p (var, x))
1980 replacement = find_fixup_replacement (replacements, x);
1981 if (replacement->new == 0)
1982 replacement->new = copy_most_rtx (x, var);
1984 *loc = x = replacement->new;
1985 code = GET_CODE (x);
1987 break;
1989 case REG:
1990 case CC0:
1991 case PC:
1992 case CONST_INT:
1993 case CONST:
1994 case SYMBOL_REF:
1995 case LABEL_REF:
1996 case CONST_DOUBLE:
1997 return;
1999 case SIGN_EXTRACT:
2000 case ZERO_EXTRACT:
2001 /* Note that in some cases those types of expressions are altered
2002 by optimize_bit_field, and do not survive to get here. */
2003 if (XEXP (x, 0) == var
2004 || (GET_CODE (XEXP (x, 0)) == SUBREG
2005 && SUBREG_REG (XEXP (x, 0)) == var))
2007 /* Get TEM as a valid MEM in the mode presently in the insn.
2009 We don't worry about the possibility of MATCH_DUP here; it
2010 is highly unlikely and would be tricky to handle. */
2012 tem = XEXP (x, 0);
2013 if (GET_CODE (tem) == SUBREG)
2015 if (GET_MODE_BITSIZE (GET_MODE (tem))
2016 > GET_MODE_BITSIZE (GET_MODE (var)))
2018 replacement = find_fixup_replacement (replacements, var);
2019 if (replacement->new == 0)
2020 replacement->new = gen_reg_rtx (GET_MODE (var));
2021 SUBREG_REG (tem) = replacement->new;
2023 /* The following code works only if we have a MEM, so we
2024 need to handle the subreg here. We directly substitute
2025 it assuming that a subreg must be OK here. We already
2026 scheduled a replacement to copy the mem into the
2027 subreg. */
2028 XEXP (x, 0) = tem;
2029 return;
2031 else
2032 tem = fixup_memory_subreg (tem, insn, 0);
2034 else
2035 tem = fixup_stack_1 (tem, insn);
2037 /* Unless we want to load from memory, get TEM into the proper mode
2038 for an extract from memory. This can only be done if the
2039 extract is at a constant position and length. */
2041 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2042 && GET_CODE (XEXP (x, 2)) == CONST_INT
2043 && ! mode_dependent_address_p (XEXP (tem, 0))
2044 && ! MEM_VOLATILE_P (tem))
2046 enum machine_mode wanted_mode = VOIDmode;
2047 enum machine_mode is_mode = GET_MODE (tem);
2048 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2050 if (GET_CODE (x) == ZERO_EXTRACT)
2052 enum machine_mode new_mode
2053 = mode_for_extraction (EP_extzv, 1);
2054 if (new_mode != MAX_MACHINE_MODE)
2055 wanted_mode = new_mode;
2057 else if (GET_CODE (x) == SIGN_EXTRACT)
2059 enum machine_mode new_mode
2060 = mode_for_extraction (EP_extv, 1);
2061 if (new_mode != MAX_MACHINE_MODE)
2062 wanted_mode = new_mode;
2065 /* If we have a narrower mode, we can do something. */
2066 if (wanted_mode != VOIDmode
2067 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2069 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2070 rtx old_pos = XEXP (x, 2);
2071 rtx newmem;
2073 /* If the bytes and bits are counted differently, we
2074 must adjust the offset. */
2075 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2076 offset = (GET_MODE_SIZE (is_mode)
2077 - GET_MODE_SIZE (wanted_mode) - offset);
2079 pos %= GET_MODE_BITSIZE (wanted_mode);
2081 newmem = adjust_address_nv (tem, wanted_mode, offset);
2083 /* Make the change and see if the insn remains valid. */
2084 INSN_CODE (insn) = -1;
2085 XEXP (x, 0) = newmem;
2086 XEXP (x, 2) = GEN_INT (pos);
2088 if (recog_memoized (insn) >= 0)
2089 return;
2091 /* Otherwise, restore old position. XEXP (x, 0) will be
2092 restored later. */
2093 XEXP (x, 2) = old_pos;
2097 /* If we get here, the bitfield extract insn can't accept a memory
2098 reference. Copy the input into a register. */
2100 tem1 = gen_reg_rtx (GET_MODE (tem));
2101 emit_insn_before (gen_move_insn (tem1, tem), insn);
2102 XEXP (x, 0) = tem1;
2103 return;
2105 break;
2107 case SUBREG:
2108 if (SUBREG_REG (x) == var)
2110 /* If this is a special SUBREG made because VAR was promoted
2111 from a wider mode, replace it with VAR and call ourself
2112 recursively, this time saying that the object previously
2113 had its current mode (by virtue of the SUBREG). */
2115 if (SUBREG_PROMOTED_VAR_P (x))
2117 *loc = var;
2118 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2119 return;
2122 /* If this SUBREG makes VAR wider, it has become a paradoxical
2123 SUBREG with VAR in memory, but these aren't allowed at this
2124 stage of the compilation. So load VAR into a pseudo and take
2125 a SUBREG of that pseudo. */
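/* For example (illustrative): once VAR is (mem:SI ...), a use such as
   (subreg:DI (reg/v:SI 58) 0) would turn into the paradoxical
   (subreg:DI (mem:SI ...) 0). We rewrite it to

       (subreg:DI (reg:SI 101) 0)

   where 101 is a fresh pseudo in PROMOTED_MODE that the caller will
   load from VAR. */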
2126 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2128 replacement = find_fixup_replacement (replacements, var);
2129 if (replacement->new == 0)
2130 replacement->new = gen_reg_rtx (promoted_mode);
2131 SUBREG_REG (x) = replacement->new;
2132 return;
2135 /* See if we have already found a replacement for this SUBREG.
2136 If so, use it. Otherwise, make a MEM and see if the insn
2137 is recognized. If not, or if we should force MEM into a register,
2138 make a pseudo for this SUBREG. */
2139 replacement = find_fixup_replacement (replacements, x);
2140 if (replacement->new)
2142 *loc = replacement->new;
2143 return;
2146 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2148 INSN_CODE (insn) = -1;
2149 if (! flag_force_mem && recog_memoized (insn) >= 0)
2150 return;
2152 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2153 return;
2155 break;
2157 case SET:
2158 /* First do special simplification of bit-field references. */
2159 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2160 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2161 optimize_bit_field (x, insn, 0);
2162 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2163 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2164 optimize_bit_field (x, insn, 0);
2166 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2167 into a register and then store it back out. */
2168 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2169 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2170 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2171 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2172 > GET_MODE_SIZE (GET_MODE (var))))
2174 replacement = find_fixup_replacement (replacements, var);
2175 if (replacement->new == 0)
2176 replacement->new = gen_reg_rtx (GET_MODE (var));
2178 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2179 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2182 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2183 insn into a pseudo and store the low part of the pseudo into VAR. */
2184 if (GET_CODE (SET_DEST (x)) == SUBREG
2185 && SUBREG_REG (SET_DEST (x)) == var
2186 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2187 > GET_MODE_SIZE (GET_MODE (var))))
2189 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2190 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2191 tem)),
2192 insn);
2193 break;
2197 rtx dest = SET_DEST (x);
2198 rtx src = SET_SRC (x);
2199 rtx outerdest = dest;
2201 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2202 || GET_CODE (dest) == SIGN_EXTRACT
2203 || GET_CODE (dest) == ZERO_EXTRACT)
2204 dest = XEXP (dest, 0);
2206 if (GET_CODE (src) == SUBREG)
2207 src = SUBREG_REG (src);
2209 /* If VAR does not appear at the top level of the SET
2210 just scan the lower levels of the tree. */
2212 if (src != var && dest != var)
2213 break;
2215 /* We will need to rerecognize this insn. */
2216 INSN_CODE (insn) = -1;
2218 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2219 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2221 /* Since this case will return, ensure we fixup all the
2222 operands here. */
2223 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2224 insn, replacements);
2225 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2226 insn, replacements);
2227 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2228 insn, replacements);
2230 tem = XEXP (outerdest, 0);
2232 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2233 that may appear inside a ZERO_EXTRACT.
2234 This was legitimate when the MEM was a REG. */
2235 if (GET_CODE (tem) == SUBREG
2236 && SUBREG_REG (tem) == var)
2237 tem = fixup_memory_subreg (tem, insn, 0);
2238 else
2239 tem = fixup_stack_1 (tem, insn);
2241 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2242 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2243 && ! mode_dependent_address_p (XEXP (tem, 0))
2244 && ! MEM_VOLATILE_P (tem))
2246 enum machine_mode wanted_mode;
2247 enum machine_mode is_mode = GET_MODE (tem);
2248 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2250 wanted_mode = mode_for_extraction (EP_insv, 0);
2252 /* If we have a narrower mode, we can do something. */
2253 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2255 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2256 rtx old_pos = XEXP (outerdest, 2);
2257 rtx newmem;
2259 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2260 offset = (GET_MODE_SIZE (is_mode)
2261 - GET_MODE_SIZE (wanted_mode) - offset);
2263 pos %= GET_MODE_BITSIZE (wanted_mode);
2265 newmem = adjust_address_nv (tem, wanted_mode, offset);
2267 /* Make the change and see if the insn remains valid. */
2268 INSN_CODE (insn) = -1;
2269 XEXP (outerdest, 0) = newmem;
2270 XEXP (outerdest, 2) = GEN_INT (pos);
2272 if (recog_memoized (insn) >= 0)
2273 return;
2275 /* Otherwise, restore old position. XEXP (x, 0) will be
2276 restored later. */
2277 XEXP (outerdest, 2) = old_pos;
2281 /* If we get here, the bit-field store doesn't allow memory
2282 or isn't located at a constant position. Load the value into
2283 a register, do the store, and put it back into memory. */
2285 tem1 = gen_reg_rtx (GET_MODE (tem));
2286 emit_insn_before (gen_move_insn (tem1, tem), insn);
2287 emit_insn_after (gen_move_insn (tem, tem1), insn);
2288 XEXP (outerdest, 0) = tem1;
2289 return;
2292 /* STRICT_LOW_PART is a no-op on memory references
2293 and it can cause combinations to be unrecognizable,
2294 so eliminate it. */
2296 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2297 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2299 /* A valid insn to copy VAR into or out of a register
2300 must be left alone, to avoid an infinite loop here.
2301 If the reference to VAR is by a subreg, fix that up,
2302 since SUBREG is not valid for a memref.
2303 Also fix up the address of the stack slot.
2305 Note that we must not try to recognize the insn until
2306 after we know that we have valid addresses and no
2307 (subreg (mem ...) ...) constructs, since these interfere
2308 with determining the validity of the insn. */
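/* E.g. (illustrative): with VAR = (mem:SI (plus (frame-pointer)
   (const_int -4))), the plain copy

       (set (reg:SI 105) (mem:SI (plus (frame-pointer) (const_int -4))))

   is already the normal way to read a memory variable; rewriting it
   would just regenerate the same insn forever. */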
2310 if ((SET_SRC (x) == var
2311 || (GET_CODE (SET_SRC (x)) == SUBREG
2312 && SUBREG_REG (SET_SRC (x)) == var))
2313 && (GET_CODE (SET_DEST (x)) == REG
2314 || (GET_CODE (SET_DEST (x)) == SUBREG
2315 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2316 && GET_MODE (var) == promoted_mode
2317 && x == single_set (insn))
2319 rtx pat, last;
2321 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2322 if (replacement->new)
2323 SET_SRC (x) = replacement->new;
2324 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2325 SET_SRC (x) = replacement->new
2326 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2327 else
2328 SET_SRC (x) = replacement->new
2329 = fixup_stack_1 (SET_SRC (x), insn);
2331 if (recog_memoized (insn) >= 0)
2332 return;
2334 /* INSN is not valid, but we know that we want to
2335 copy SET_SRC (x) to SET_DEST (x) in some way. So
2336 we generate the move and see whether it requires more
2337 than one insn. If it does, we emit those insns and
2338 delete INSN. Otherwise, we can just replace the pattern
2339 of INSN; we have already verified above that INSN has
2340 no other function than to do X. */
2342 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2343 if (GET_CODE (pat) == SEQUENCE)
2345 last = emit_insn_before (pat, insn);
2347 /* INSN might have REG_RETVAL or other important notes, so
2348 we need to store the pattern of the last insn in the
2349 sequence into INSN similarly to the normal case. LAST
2350 should not have REG_NOTES, but we allow them if INSN has
2351 no REG_NOTES. */
2352 if (REG_NOTES (last) && REG_NOTES (insn))
2353 abort ();
2354 if (REG_NOTES (last))
2355 REG_NOTES (insn) = REG_NOTES (last);
2356 PATTERN (insn) = PATTERN (last);
2358 delete_insn (last);
2360 else
2361 PATTERN (insn) = pat;
2363 return;
2366 if ((SET_DEST (x) == var
2367 || (GET_CODE (SET_DEST (x)) == SUBREG
2368 && SUBREG_REG (SET_DEST (x)) == var))
2369 && (GET_CODE (SET_SRC (x)) == REG
2370 || (GET_CODE (SET_SRC (x)) == SUBREG
2371 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2372 && GET_MODE (var) == promoted_mode
2373 && x == single_set (insn))
2375 rtx pat, last;
2377 if (GET_CODE (SET_DEST (x)) == SUBREG)
2378 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2379 else
2380 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2382 if (recog_memoized (insn) >= 0)
2383 return;
2385 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2386 if (GET_CODE (pat) == SEQUENCE)
2388 last = emit_insn_before (pat, insn);
2390 /* INSN might have REG_RETVAL or other important notes, so
2391 we need to store the pattern of the last insn in the
2392 sequence into INSN similarly to the normal case. LAST
2393 should not have REG_NOTES, but we allow them if INSN has
2394 no REG_NOTES. */
2395 if (REG_NOTES (last) && REG_NOTES (insn))
2396 abort ();
2397 if (REG_NOTES (last))
2398 REG_NOTES (insn) = REG_NOTES (last);
2399 PATTERN (insn) = PATTERN (last);
2401 delete_insn (last);
2403 else
2404 PATTERN (insn) = pat;
2406 return;
2409 /* Otherwise, storing into VAR must be handled specially
2410 by storing into a temporary and copying that into VAR
2411 with a new insn after this one. Note that this case
2412 will be used when storing into a promoted scalar since
2413 the insn will now have different modes on the input
2414 and output and hence will be invalid (except for the case
2415 of setting it to a constant, which does not need any
2416 change if it is valid). We generate extra code in that case,
2417 but combine.c will eliminate it. */
2419 if (dest == var)
2421 rtx temp;
2422 rtx fixeddest = SET_DEST (x);
2424 /* STRICT_LOW_PART can be discarded around a MEM. */
2425 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2426 fixeddest = XEXP (fixeddest, 0);
2427 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2428 if (GET_CODE (fixeddest) == SUBREG)
2430 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2431 promoted_mode = GET_MODE (fixeddest);
2433 else
2434 fixeddest = fixup_stack_1 (fixeddest, insn);
2436 temp = gen_reg_rtx (promoted_mode);
2438 emit_insn_after (gen_move_insn (fixeddest,
2439 gen_lowpart (GET_MODE (fixeddest),
2440 temp)),
2441 insn);
2443 SET_DEST (x) = temp;
2447 default:
2448 break;
2451 /* Nothing special about this RTX; fix its operands. */
2453 fmt = GET_RTX_FORMAT (code);
2454 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2456 if (fmt[i] == 'e')
2457 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2458 else if (fmt[i] == 'E')
2460 int j;
2461 for (j = 0; j < XVECLEN (x, i); j++)
2462 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2463 insn, replacements);
2468 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2469 return an rtx (MEM:m1 newaddr) which is equivalent.
2470 If any insns must be emitted to compute NEWADDR, put them before INSN.
2472 UNCRITICAL nonzero means accept paradoxical subregs.
2473 This is used for subregs found inside REG_NOTES. */
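/* An illustrative sketch (register numbers hypothetical):

       (subreg:QI (mem:SI (reg:SI 58)) 1)

   becomes (mem:QI (plus:SI (reg:SI 58) (const_int 1))). If the
   offsetted address is not valid on the target, the insns computing
   it into a scratch register are emitted before INSN instead. */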
2475 static rtx
2476 fixup_memory_subreg (x, insn, uncritical)
2477 rtx x;
2478 rtx insn;
2479 int uncritical;
2481 int offset = SUBREG_BYTE (x);
2482 rtx addr = XEXP (SUBREG_REG (x), 0);
2483 enum machine_mode mode = GET_MODE (x);
2484 rtx result;
2486 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2487 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2488 && ! uncritical)
2489 abort ();
2491 if (!flag_force_addr
2492 && memory_address_p (mode, plus_constant (addr, offset)))
2493 /* Shortcut if no insns need be emitted. */
2494 return adjust_address (SUBREG_REG (x), mode, offset);
2496 start_sequence ();
2497 result = adjust_address (SUBREG_REG (x), mode, offset);
2498 emit_insn_before (gen_sequence (), insn);
2499 end_sequence ();
2500 return result;
2503 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2504 Replace subexpressions of X in place.
2505 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2506 Otherwise return X, with its contents possibly altered.
2508 If any insns must be emitted to compute NEWADDR, put them before INSN.
2510 UNCRITICAL is as in fixup_memory_subreg. */
2512 static rtx
2513 walk_fixup_memory_subreg (x, insn, uncritical)
2514 rtx x;
2515 rtx insn;
2516 int uncritical;
2518 enum rtx_code code;
2519 const char *fmt;
2520 int i;
2522 if (x == 0)
2523 return 0;
2525 code = GET_CODE (x);
2527 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2528 return fixup_memory_subreg (x, insn, uncritical);
2530 /* Nothing special about this RTX; fix its operands. */
2532 fmt = GET_RTX_FORMAT (code);
2533 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2535 if (fmt[i] == 'e')
2536 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2537 else if (fmt[i] == 'E')
2539 int j;
2540 for (j = 0; j < XVECLEN (x, i); j++)
2541 XVECEXP (x, i, j)
2542 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2545 return x;
2548 /* For each memory ref within X, if it refers to a stack slot
2549 with an out of range displacement, put the address in a temp register
2550 (emitting new insns before INSN to load these registers)
2551 and alter the memory ref to use that register.
2552 Replace each such MEM rtx with a copy, to avoid clobberage. */
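/* Illustrative sketch, assuming a target whose frame displacements
   are limited to 12 bits:

       (mem:SI (plus:SI (frame-pointer) (const_int 40000)))

   is replaced by a copy using a fresh address register,

       (set (reg:SI 102) (plus:SI (frame-pointer) (const_int 40000)))
       ... (mem:SI (reg:SI 102)) ...

   with the address load emitted before INSN. */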
2554 static rtx
2555 fixup_stack_1 (x, insn)
2556 rtx x;
2557 rtx insn;
2559 int i;
2560 RTX_CODE code = GET_CODE (x);
2561 const char *fmt;
2563 if (code == MEM)
2565 rtx ad = XEXP (x, 0);
2566 /* If we have address of a stack slot but it's not valid
2567 (displacement is too large), compute the sum in a register. */
2568 if (GET_CODE (ad) == PLUS
2569 && GET_CODE (XEXP (ad, 0)) == REG
2570 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2571 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2572 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2573 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2574 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2575 #endif
2576 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2577 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2578 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2579 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2581 rtx temp, seq;
2582 if (memory_address_p (GET_MODE (x), ad))
2583 return x;
2585 start_sequence ();
2586 temp = copy_to_reg (ad);
2587 seq = gen_sequence ();
2588 end_sequence ();
2589 emit_insn_before (seq, insn);
2590 return replace_equiv_address (x, temp);
2592 return x;
2595 fmt = GET_RTX_FORMAT (code);
2596 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2598 if (fmt[i] == 'e')
2599 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2600 else if (fmt[i] == 'E')
2602 int j;
2603 for (j = 0; j < XVECLEN (x, i); j++)
2604 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2607 return x;
2610 /* Optimization: a bit-field instruction whose field
2611 happens to be a byte or halfword in memory
2612 can be changed to a move instruction.
2614 We call here when INSN is an insn to examine or store into a bit-field.
2615 BODY is the SET-rtx to be altered.
2617 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2618 (Currently this is called only from function.c, and EQUIV_MEM
2619 is always 0.) */
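/* For example (illustrative; bit positions assumed counted from the
   low-order end):

       (set (reg:SI 60) (zero_extract:SI (mem:SI A)
                                         (const_int 8) (const_int 8)))

   extracts the second byte, so it can become an ordinary load,
   roughly

       (set (reg:SI 60) (zero_extend:SI (mem:QI (plus A (const_int 1)))))

   once the address is adjusted as below. */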
2621 static void
2622 optimize_bit_field (body, insn, equiv_mem)
2623 rtx body;
2624 rtx insn;
2625 rtx *equiv_mem;
2627 rtx bitfield;
2628 int destflag;
2629 rtx seq = 0;
2630 enum machine_mode mode;
2632 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2633 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2634 bitfield = SET_DEST (body), destflag = 1;
2635 else
2636 bitfield = SET_SRC (body), destflag = 0;
2638 /* First check that the field being stored has constant size and position
2639 and is in fact a byte or halfword suitably aligned. */
2641 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2642 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2643 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2644 != BLKmode)
2645 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2647 rtx memref = 0;
2649 /* Now check that the containing word is memory, not a register,
2650 and that it is safe to change the machine mode. */
2652 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2653 memref = XEXP (bitfield, 0);
2654 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2655 && equiv_mem != 0)
2656 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2657 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2658 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2659 memref = SUBREG_REG (XEXP (bitfield, 0));
2660 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2661 && equiv_mem != 0
2662 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2663 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2665 if (memref
2666 && ! mode_dependent_address_p (XEXP (memref, 0))
2667 && ! MEM_VOLATILE_P (memref))
2669 /* Now adjust the address, first for any subreg'ing
2670 that we are now getting rid of,
2671 and then for which byte of the word is wanted. */
2673 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2674 rtx insns;
2676 /* Adjust OFFSET to count bits from low-address byte. */
2677 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2678 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2679 - offset - INTVAL (XEXP (bitfield, 1)));
2681 /* Adjust OFFSET to count bytes from low-address byte. */
2682 offset /= BITS_PER_UNIT;
2683 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2685 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2686 / UNITS_PER_WORD) * UNITS_PER_WORD;
2687 if (BYTES_BIG_ENDIAN)
2688 offset -= (MIN (UNITS_PER_WORD,
2689 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2690 - MIN (UNITS_PER_WORD,
2691 GET_MODE_SIZE (GET_MODE (memref))));
2694 start_sequence ();
2695 memref = adjust_address (memref, mode, offset);
2696 insns = get_insns ();
2697 end_sequence ();
2698 emit_insns_before (insns, insn);
2700 /* Store this memory reference where
2701 we found the bit field reference. */
2703 if (destflag)
2705 validate_change (insn, &SET_DEST (body), memref, 1);
2706 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2708 rtx src = SET_SRC (body);
2709 while (GET_CODE (src) == SUBREG
2710 && SUBREG_BYTE (src) == 0)
2711 src = SUBREG_REG (src);
2712 if (GET_MODE (src) != GET_MODE (memref))
2713 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2714 validate_change (insn, &SET_SRC (body), src, 1);
2716 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2717 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2718 /* This shouldn't happen because anything that didn't have
2719 one of these modes should have got converted explicitly
2720 and then referenced through a subreg.
2721 This is so because the original bit-field was
2722 handled by agg_mode and so its tree structure had
2723 the same mode that memref now has. */
2724 abort ();
2726 else
2728 rtx dest = SET_DEST (body);
2730 while (GET_CODE (dest) == SUBREG
2731 && SUBREG_BYTE (dest) == 0
2732 && (GET_MODE_CLASS (GET_MODE (dest))
2733 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2734 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2735 <= UNITS_PER_WORD))
2736 dest = SUBREG_REG (dest);
2738 validate_change (insn, &SET_DEST (body), dest, 1);
2740 if (GET_MODE (dest) == GET_MODE (memref))
2741 validate_change (insn, &SET_SRC (body), memref, 1);
2742 else
2744 /* Convert the mem ref to the destination mode. */
2745 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2747 start_sequence ();
2748 convert_move (newreg, memref,
2749 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2750 seq = get_insns ();
2751 end_sequence ();
2753 validate_change (insn, &SET_SRC (body), newreg, 1);
2757 /* See if we can convert this extraction or insertion into
2758 a simple move insn. We might not be able to do so if this
2759 was, for example, part of a PARALLEL.
2761 If we succeed, write out any needed conversions. If we fail,
2762 it is hard to guess why we failed, so don't do anything
2763 special; just let the optimization be suppressed. */
2765 if (apply_change_group () && seq)
2766 emit_insns_before (seq, insn);
2771 /* These routines are responsible for converting virtual register references
2772 to the actual hard register references once RTL generation is complete.
2774 The following five variables are used for communication between the
2775 routines. They contain the offsets of the virtual registers from their
2776 respective hard registers. */
2778 static int in_arg_offset;
2779 static int var_offset;
2780 static int dynamic_offset;
2781 static int out_arg_offset;
2782 static int cfa_offset;
2784 /* On most machines, the stack pointer register is equivalent to the bottom
2785 of the stack. */
2787 #ifndef STACK_POINTER_OFFSET
2788 #define STACK_POINTER_OFFSET 0
2789 #endif
2791 /* If not defined, pick an appropriate default for the offset of dynamically
2792 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2793 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2795 #ifndef STACK_DYNAMIC_OFFSET
2797 /* The bottom of the stack points to the actual arguments. If
2798 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2799 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2800 stack space for register parameters is not pushed by the caller, but
2801 rather part of the fixed stack areas and hence not included in
2802 `current_function_outgoing_args_size'. Nevertheless, we must allow
2803 for it when allocating stack dynamic objects. */
2805 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2806 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2807 ((ACCUMULATE_OUTGOING_ARGS \
2808 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2809 + (STACK_POINTER_OFFSET))
2811 #else
2812 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2813 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2814 + (STACK_POINTER_OFFSET))
2815 #endif
2816 #endif
2818 /* On most machines, the CFA coincides with the first incoming parm. */
2820 #ifndef ARG_POINTER_CFA_OFFSET
2821 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2822 #endif
2824 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2825 its address taken. DECL is the decl for the object stored in the
2826 register, for later use if we do need to force REG into the stack.
2827 REG is overwritten by the MEM like in put_reg_into_stack. */
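/* A sketch of the rewrite (pseudo 100 is hypothetical; the ADDRESSOF
   carries Pmode): taking the address of pseudo 58 overwrites the REG
   in place, so

       (reg:SI 58)   becomes   (mem:SI (addressof (reg:SI 100) 58 <decl>))

   and every existing reference to the old register now sees the MEM. */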
2829 rtx
2830 gen_mem_addressof (reg, decl)
2831 rtx reg;
2832 tree decl;
2834 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2835 REGNO (reg), decl);
2837 /* Calculate this before we start messing with decl's RTL. */
2838 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2840 /* If the original REG was a user-variable, then so is the REG whose
2841 address is being taken. Likewise for unchanging. */
2842 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2843 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2845 PUT_CODE (reg, MEM);
2846 MEM_ATTRS (reg) = 0;
2847 XEXP (reg, 0) = r;
2849 if (decl)
2851 tree type = TREE_TYPE (decl);
2852 enum machine_mode decl_mode
2853 = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
2854 : DECL_MODE (decl));
2855 rtx decl_rtl = decl ? DECL_RTL_IF_SET (decl) : 0;
2857 PUT_MODE (reg, decl_mode);
2859 /* Clear DECL_RTL momentarily so functions below will work
2860 properly, then set it again. */
2861 if (decl_rtl == reg)
2862 SET_DECL_RTL (decl, 0);
2864 set_mem_attributes (reg, decl, 1);
2865 set_mem_alias_set (reg, set);
2867 if (decl_rtl == reg)
2868 SET_DECL_RTL (decl, reg);
2870 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2871 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2873 else
2874 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2876 return reg;
2879 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2881 void
2882 flush_addressof (decl)
2883 tree decl;
2885 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2886 && DECL_RTL (decl) != 0
2887 && GET_CODE (DECL_RTL (decl)) == MEM
2888 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2889 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2890 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2893 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2895 static void
2896 put_addressof_into_stack (r, ht)
2897 rtx r;
2898 struct hash_table *ht;
2900 tree decl, type;
2901 int volatile_p, used_p;
2903 rtx reg = XEXP (r, 0);
2905 if (GET_CODE (reg) != REG)
2906 abort ();
2908 decl = ADDRESSOF_DECL (r);
2909 if (decl)
2911 type = TREE_TYPE (decl);
2912 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2913 && TREE_THIS_VOLATILE (decl));
2914 used_p = (TREE_USED (decl)
2915 || (TREE_CODE (decl) != SAVE_EXPR
2916 && DECL_INITIAL (decl) != 0));
2918 else
2920 type = NULL_TREE;
2921 volatile_p = 0;
2922 used_p = 1;
2925 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2926 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2929 /* List of replacements made below in purge_addressof_1 when creating
2930 bitfield insertions. */
2931 static rtx purge_bitfield_addressof_replacements;
2933 /* List of replacements made below in purge_addressof_1 for patterns
2934 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2935 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2936 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2937 enough in complex cases, e.g. when some field values can be
2938 extracted by using a MEM with a narrower mode. */
2939 static rtx purge_addressof_replacements;
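/* Both lists are chains of EXPR_LISTs in which each key is followed
   by its value; one entry (contents hypothetical) looks like

       (expr_list (addressof (reg 100) 58 ...)
          (expr_list (reg 112)
             ... next entry ...))

   so the same substitution can later be redone on the REG_NOTES. */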
2941 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2942 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2943 the stack. If the function returns FALSE then the replacement could not
2944 be made. */
2946 static bool
2947 purge_addressof_1 (loc, insn, force, store, ht)
2948 rtx *loc;
2949 rtx insn;
2950 int force, store;
2951 struct hash_table *ht;
2953 rtx x;
2954 RTX_CODE code;
2955 int i, j;
2956 const char *fmt;
2957 bool result = true;
2959 /* Re-start here to avoid recursion in common cases. */
2960 restart:
2962 x = *loc;
2963 if (x == 0)
2964 return true;
2966 code = GET_CODE (x);
2968 /* If we don't return in any of the cases below, we will recurse inside
2969 the RTX, which will normally result in any ADDRESSOF being forced into
2970 memory. */
2971 if (code == SET)
2973 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2974 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2975 return result;
2977 else if (code == ADDRESSOF)
2979 rtx sub, insns;
2981 if (GET_CODE (XEXP (x, 0)) != MEM)
2983 put_addressof_into_stack (x, ht);
2984 return true;
2987 /* We must create a copy of the rtx because it was created by
2988 overwriting a REG rtx which is always shared. */
2989 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2990 if (validate_change (insn, loc, sub, 0)
2991 || validate_replace_rtx (x, sub, insn))
2992 return true;
2994 start_sequence ();
2995 sub = force_operand (sub, NULL_RTX);
2996 if (! validate_change (insn, loc, sub, 0)
2997 && ! validate_replace_rtx (x, sub, insn))
2998 abort ();
3000 insns = gen_sequence ();
3001 end_sequence ();
3002 emit_insn_before (insns, insn);
3003 return true;
3006 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3008 rtx sub = XEXP (XEXP (x, 0), 0);
3010 if (GET_CODE (sub) == MEM)
3011 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3012 else if (GET_CODE (sub) == REG
3013 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3014 ;
3015 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3017 int size_x, size_sub;
3019 if (!insn)
3021 /* When processing REG_NOTES look at the list of
3022 replacements done on the insn to find the register that X
3023 was replaced by. */
3024 rtx tem;
3026 for (tem = purge_bitfield_addressof_replacements;
3027 tem != NULL_RTX;
3028 tem = XEXP (XEXP (tem, 1), 1))
3029 if (rtx_equal_p (x, XEXP (tem, 0)))
3031 *loc = XEXP (XEXP (tem, 1), 0);
3032 return true;
3035 /* See comment for purge_addressof_replacements. */
3036 for (tem = purge_addressof_replacements;
3037 tem != NULL_RTX;
3038 tem = XEXP (XEXP (tem, 1), 1))
3039 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3041 rtx z = XEXP (XEXP (tem, 1), 0);
3043 if (GET_MODE (x) == GET_MODE (z)
3044 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3045 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3046 abort ();
3048 /* It can happen that the note may speak of things
3049 in a wider (or just different) mode than the
3050 code did. This is especially true of
3051 REG_RETVAL. */
3053 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3054 z = SUBREG_REG (z);
3056 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3057 && (GET_MODE_SIZE (GET_MODE (x))
3058 > GET_MODE_SIZE (GET_MODE (z))))
3060 /* This can occur as a result of invalid
3061 pointer casts, e.g. float f; ...
3062 *(long long int *)&f.
3063 ??? We could emit a warning here, but
3064 without a line number that wouldn't be
3065 very helpful. */
3066 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3068 else
3069 z = gen_lowpart (GET_MODE (x), z);
3071 *loc = z;
3072 return true;
3075 /* Sometimes we may not be able to find the replacement. For
3076 example when the original reference was a MEM in a wider mode,
3077 and the note is part of a sign extension of a narrowed
3078 version of that MEM. Gcc testcase compile/990829-1.c can
3079 generate an example of this situation. Rather than complain
3080 we return false, which will prompt our caller to remove the
3081 offending note. */
3082 return false;
3085 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3086 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3088 /* Don't even consider working with paradoxical subregs,
3089 or the moral equivalent seen here. */
3090 if (size_x <= size_sub
3091 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3093 /* Do a bitfield insertion to mirror what would happen
3094 in memory. */
3096 rtx val, seq;
3098 if (store)
3100 rtx p = PREV_INSN (insn);
3102 start_sequence ();
3103 val = gen_reg_rtx (GET_MODE (x));
3104 if (! validate_change (insn, loc, val, 0))
3106 /* Discard the current sequence and put the
3107 ADDRESSOF on stack. */
3108 end_sequence ();
3109 goto give_up;
3111 seq = gen_sequence ();
3112 end_sequence ();
3113 emit_insn_before (seq, insn);
3114 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3115 insn, ht);
3117 start_sequence ();
3118 store_bit_field (sub, size_x, 0, GET_MODE (x),
3119 val, GET_MODE_SIZE (GET_MODE (sub)));
3121 /* Make sure to unshare any shared rtl that store_bit_field
3122 might have created. */
3123 unshare_all_rtl_again (get_insns ());
3125 seq = gen_sequence ();
3126 end_sequence ();
3127 p = emit_insn_after (seq, insn);
3128 if (NEXT_INSN (insn))
3129 compute_insns_for_mem (NEXT_INSN (insn),
3130 p ? NEXT_INSN (p) : NULL_RTX,
3131 ht);
3133 else
3135 rtx p = PREV_INSN (insn);
3137 start_sequence ();
3138 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3139 GET_MODE (x), GET_MODE (x),
3140 GET_MODE_SIZE (GET_MODE (sub)));
3142 if (! validate_change (insn, loc, val, 0))
3144 /* Discard the current sequence and put the
3145 ADDRESSOF on stack. */
3146 end_sequence ();
3147 goto give_up;
3150 seq = gen_sequence ();
3151 end_sequence ();
3152 emit_insn_before (seq, insn);
3153 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3154 insn, ht);
3157 /* Remember the replacement so that the same one can be done
3158 on the REG_NOTES. */
3159 purge_bitfield_addressof_replacements
3160 = gen_rtx_EXPR_LIST (VOIDmode, x,
3161 gen_rtx_EXPR_LIST
3162 (VOIDmode, val,
3163 purge_bitfield_addressof_replacements));
3165 /* We replaced with a reg -- all done. */
3166 return true;
3170 else if (validate_change (insn, loc, sub, 0))
3172 /* Remember the replacement so that the same one can be done
3173 on the REG_NOTES. */
3174 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3176 rtx tem;
3178 for (tem = purge_addressof_replacements;
3179 tem != NULL_RTX;
3180 tem = XEXP (XEXP (tem, 1), 1))
3181 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3183 XEXP (XEXP (tem, 1), 0) = sub;
3184 return true;
3186 purge_addressof_replacements
3187 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3188 gen_rtx_EXPR_LIST (VOIDmode, sub,
3189 purge_addressof_replacements));
3190 return true;
3192 goto restart;
3196 give_up:
3197 /* Scan all subexpressions. */
3198 fmt = GET_RTX_FORMAT (code);
3199 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3201 if (*fmt == 'e')
3202 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3203 else if (*fmt == 'E')
3204 for (j = 0; j < XVECLEN (x, i); j++)
3205 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3208 return result;
3211 /* Return a new hash table entry in HT. */
3213 static struct hash_entry *
3214 insns_for_mem_newfunc (he, ht, k)
3215 struct hash_entry *he;
3216 struct hash_table *ht;
3217 hash_table_key k ATTRIBUTE_UNUSED;
3219 struct insns_for_mem_entry *ifmhe;
3220 if (he)
3221 return he;
3223 ifmhe = ((struct insns_for_mem_entry *)
3224 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3225 ifmhe->insns = NULL_RTX;
3227 return &ifmhe->he;
3230 /* Return a hash value for K, a REG. */
3232 static unsigned long
3233 insns_for_mem_hash (k)
3234 hash_table_key k;
3236 /* K is really an RTX. Just use the address as the hash value. */
3237 return (unsigned long) k;
3240 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3242 static bool
3243 insns_for_mem_comp (k1, k2)
3244 hash_table_key k1;
3245 hash_table_key k2;
3247 return k1 == k2;
3250 struct insns_for_mem_walk_info
3252 /* The hash table that we are using to record which INSNs use which
3253 MEMs. */
3254 struct hash_table *ht;
3256 /* The INSN we are currently processing. */
3257 rtx insn;
3259 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3260 to find the insns that use the REGs in the ADDRESSOFs. */
3261 int pass;
3264 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3265 that might be used in an ADDRESSOF expression, record this INSN in
3266 the hash table given by DATA (which is really a pointer to an
3267 insns_for_mem_walk_info structure). */
3269 static int
3270 insns_for_mem_walk (r, data)
3271 rtx *r;
3272 void *data;
3274 struct insns_for_mem_walk_info *ifmwi
3275 = (struct insns_for_mem_walk_info *) data;
3277 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3278 && GET_CODE (XEXP (*r, 0)) == REG)
3279 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3280 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3282 /* Look up this REG in the hash table; pass 0 created the entry if it appears in an ADDRESSOF. */
3283 struct insns_for_mem_entry *ifme
3284 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3285 *r,
3286 /*create=*/0,
3287 /*copy=*/0);
3289 /* If we have not already recorded this INSN, do so now. Since
3290 we process the INSNs in order, we know that if we have
3291 recorded it, it must be at the front of the list. */
3292 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3293 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3294 ifme->insns);
3297 return 0;
3300 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3301 which REGs in HT. */
3303 static void
3304 compute_insns_for_mem (insns, last_insn, ht)
3305 rtx insns;
3306 rtx last_insn;
3307 struct hash_table *ht;
3309 rtx insn;
3310 struct insns_for_mem_walk_info ifmwi;
3311 ifmwi.ht = ht;
3313 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3314 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3315 if (INSN_P (insn))
3317 ifmwi.insn = insn;
3318 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3322 /* Helper function for purge_addressof called through for_each_rtx.
3323 Returns true iff the rtl is an ADDRESSOF. */
3325 static int
3326 is_addressof (rtl, data)
3327 rtx *rtl;
3328 void *data ATTRIBUTE_UNUSED;
3330 return GET_CODE (*rtl) == ADDRESSOF;
3333 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3334 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3335 stack. */
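/* A minimal usage sketch, assuming the current function's insn chain
   is complete:

       purge_addressof (get_insns ());

   after which no ADDRESSOF rtx remains in the stream. */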
3337 void
3338 purge_addressof (insns)
3339 rtx insns;
3341 rtx insn;
3342 struct hash_table ht;
3344 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3345 requires a fixup pass over the instruction stream to correct
3346 INSNs that depended on the REG being a REG, and not a MEM. But,
3347 these fixup passes are slow. Furthermore, most MEMs are not
3348 mentioned in very many instructions. So, we speed up the process
3349 by pre-calculating which REGs occur in which INSNs; that allows
3350 us to perform the fixup passes much more quickly. */
3351 hash_table_init (&ht,
3352 insns_for_mem_newfunc,
3353 insns_for_mem_hash,
3354 insns_for_mem_comp);
3355 compute_insns_for_mem (insns, NULL_RTX, &ht);
3357 for (insn = insns; insn; insn = NEXT_INSN (insn))
3358 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3359 || GET_CODE (insn) == CALL_INSN)
3361 if (! purge_addressof_1 (&PATTERN (insn), insn,
3362 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3363 /* If we could not replace the ADDRESSOFs in the insn,
3364 something is wrong. */
3365 abort ();
3367 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3369 /* If we could not replace the ADDRESSOFs in the insn's notes,
3370 we can just remove the offending notes instead. */
3371 rtx note;
3373 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3375 /* If we find a REG_RETVAL note then the insn is a libcall.
3376 Such insns must have REG_EQUAL notes as well, in order
3377 for later passes of the compiler to work. So it is not
3378 safe to delete the notes here, and instead we abort. */
3379 if (REG_NOTE_KIND (note) == REG_RETVAL)
3380 abort ();
3381 if (for_each_rtx (&note, is_addressof, NULL))
3382 remove_note (insn, note);
3387 /* Clean up. */
3388 hash_table_free (&ht);
3389 purge_bitfield_addressof_replacements = 0;
3390 purge_addressof_replacements = 0;
3392 /* REGs are shared. purge_addressof will destructively replace a REG
3393 with a MEM, which creates shared MEMs.
3395 Unfortunately, the children of put_reg_into_stack assume that MEMs
3396 referring to the same stack slot are shared (fixup_var_refs and
3397 the associated hash table code).
3399 So, we have to do another unsharing pass after we have flushed any
3400 REGs that had their address taken into the stack.
3402 It may be worth tracking whether or not we converted any REGs into
3403 MEMs to avoid this overhead when it is not needed. */
3404 unshare_all_rtl_again (get_insns ());
3407 /* Convert a SET of a hard subreg to a set of the appropriate hard
3408 register. A subroutine of purge_hard_subreg_sets. */
3410 static void
3411 purge_single_hard_subreg_set (pattern)
3412 rtx pattern;
3414 rtx reg = SET_DEST (pattern);
3415 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3416 int offset = 0;
3418 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3419 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3421 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3422 GET_MODE (SUBREG_REG (reg)),
3423 SUBREG_BYTE (reg),
3424 GET_MODE (reg));
3425 reg = SUBREG_REG (reg);
3429 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3431 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3432 SET_DEST (pattern) = reg;
3436 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3437 only such SETs that we expect to see are those left in because
3438 integrate can't handle sets of parts of a return value register.
3440 We don't use alter_subreg because we only want to eliminate subregs
3441 of hard registers. */
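/* For example (illustrative; assumes little-endian word layout and
   SImode-sized hard registers):

       (set (subreg:SI (reg:DI 0) 4) (reg:SI 100))

   becomes

       (set (reg:SI 1) (reg:SI 100))

   since hard register 1 holds the high word of the DImode pair. */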
3443 void
3444 purge_hard_subreg_sets (insn)
3445 rtx insn;
3447 for (; insn; insn = NEXT_INSN (insn))
3449 if (INSN_P (insn))
3451 rtx pattern = PATTERN (insn);
3452 switch (GET_CODE (pattern))
3454 case SET:
3455 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3456 purge_single_hard_subreg_set (pattern);
3457 break;
3458 case PARALLEL:
3460 int j;
3461 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3463 rtx inner_pattern = XVECEXP (pattern, 0, j);
3464 if (GET_CODE (inner_pattern) == SET
3465 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3466 purge_single_hard_subreg_set (inner_pattern);
3469 break;
3470 default:
3471 break;
3477 /* Pass through the INSNS of function FNDECL and convert virtual register
3478 references to hard register references. */
3480 void
3481 instantiate_virtual_regs (fndecl, insns)
3482 tree fndecl;
3483 rtx insns;
3485 rtx insn;
3486 unsigned int i;
3488 /* Compute the offsets to use for this function. */
3489 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3490 var_offset = STARTING_FRAME_OFFSET;
3491 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3492 out_arg_offset = STACK_POINTER_OFFSET;
3493 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3495 /* Scan all variables and parameters of this function. For each that is
3496 in memory, instantiate all virtual registers if the result is a valid
3497 address. If not, we do it later. That will handle most uses of virtual
3498 regs on many machines. */
3499 instantiate_decls (fndecl, 1);
3501 /* Initialize recognition, indicating that volatile is OK. */
3502 init_recog ();
3504 /* Scan through all the insns, instantiating every virtual register still
3505 present. */
3506 for (insn = insns; insn; insn = NEXT_INSN (insn))
3507 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3508 || GET_CODE (insn) == CALL_INSN)
3510 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3511 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3512 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3513 if (GET_CODE (insn) == CALL_INSN)
3514 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3515 NULL_RTX, 0);
3518 /* Instantiate the stack slots for the parm registers, for later use in
3519 addressof elimination. */
3520 for (i = 0; i < max_parm_reg; ++i)
3521 if (parm_reg_stack_loc[i])
3522 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3524 /* Now instantiate the remaining register equivalences for debugging info.
3525 These will not be valid addresses. */
3526 instantiate_decls (fndecl, 0);
3528 /* Indicate that, from now on, assign_stack_local should use
3529 frame_pointer_rtx. */
3530 virtuals_instantiated = 1;
3533 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3534 all virtual registers in their DECL_RTL's.
3536 If VALID_ONLY, do this only if the resulting address is still valid.
3537 Otherwise, always do it. */
3539 static void
3540 instantiate_decls (fndecl, valid_only)
3541 tree fndecl;
3542 int valid_only;
3544 tree decl;
3546 /* Process all parameters of the function. */
3547 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3549 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3550 HOST_WIDE_INT size_rtl;
3552 instantiate_decl (DECL_RTL (decl), size, valid_only);
3554 /* If the parameter was promoted, then the incoming RTL mode may be
3555 larger than the declared type size. We must use the larger of
3556 the two sizes. */
3557 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3558 size = MAX (size_rtl, size);
3559 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3562 /* Now process all variables defined in the function or its subblocks. */
3563 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3566 /* Subroutine of instantiate_decls: Process all decls in the given
3567 BLOCK node and all its subblocks. */
3569 static void
3570 instantiate_decls_1 (let, valid_only)
3571 tree let;
3572 int valid_only;
3574 tree t;
3576 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3577 if (DECL_RTL_SET_P (t))
3578 instantiate_decl (DECL_RTL (t),
3579 int_size_in_bytes (TREE_TYPE (t)),
3580 valid_only);
3582 /* Process all subblocks. */
3583 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3584 instantiate_decls_1 (t, valid_only);
3587 /* Subroutine of the preceding procedures: Given RTL representing a
3588 decl and the size of the object, do any instantiation required.
3590 If VALID_ONLY is non-zero, it means that the RTL should only be
3591 changed if the new address is valid. */
3593 static void
3594 instantiate_decl (x, size, valid_only)
3595 rtx x;
3596 HOST_WIDE_INT size;
3597 int valid_only;
3599 enum machine_mode mode;
3600 rtx addr;
3602 /* If this is not a MEM, no need to do anything. Similarly if the
3603 address is a constant or a register that is not a virtual register. */
3605 if (x == 0 || GET_CODE (x) != MEM)
3606 return;
3608 addr = XEXP (x, 0);
3609 if (CONSTANT_P (addr)
3610 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3611 || (GET_CODE (addr) == REG
3612 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3613 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3614 return;
3616 /* If we should only do this if the address is valid, copy the address.
3617 We need to do this so we can undo any changes that might make the
3618 address invalid. This copy is unfortunate, but probably can't be
3619 avoided. */
3621 if (valid_only)
3622 addr = copy_rtx (addr);
3624 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3626 if (valid_only && size >= 0)
3628 unsigned HOST_WIDE_INT decl_size = size;
3630 /* Now verify that the resulting address is valid for every integer or
3631 floating-point mode up to and including SIZE bytes long. We do this
3632 since the object might be accessed in any mode and frame addresses
3633 are shared. */
3635 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3636 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3637 mode = GET_MODE_WIDER_MODE (mode))
3638 if (! memory_address_p (mode, addr))
3639 return;
3641 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3642 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3643 mode = GET_MODE_WIDER_MODE (mode))
3644 if (! memory_address_p (mode, addr))
3645 return;
3648 /* Put back the address now that we have updated it and we either know
3649 it is valid or we don't care whether it is valid. */
3651 XEXP (x, 0) = addr;
3654 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3655 is a virtual register, return the equivalent hard register and set the
3656 offset indirectly through the pointer. Otherwise, return 0. */
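/* The mapping implemented below, using the offsets computed in
   instantiate_virtual_regs:

       virtual_incoming_args_rtx -> arg_pointer_rtx + in_arg_offset
       virtual_stack_vars_rtx -> frame_pointer_rtx + var_offset
       virtual_stack_dynamic_rtx -> stack_pointer_rtx + dynamic_offset
       virtual_outgoing_args_rtx -> stack_pointer_rtx + out_arg_offset
       virtual_cfa_rtx -> arg_pointer_rtx + cfa_offset  */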
3658 static rtx
3659 instantiate_new_reg (x, poffset)
3660 rtx x;
3661 HOST_WIDE_INT *poffset;
3663 rtx new;
3664 HOST_WIDE_INT offset;
3666 if (x == virtual_incoming_args_rtx)
3667 new = arg_pointer_rtx, offset = in_arg_offset;
3668 else if (x == virtual_stack_vars_rtx)
3669 new = frame_pointer_rtx, offset = var_offset;
3670 else if (x == virtual_stack_dynamic_rtx)
3671 new = stack_pointer_rtx, offset = dynamic_offset;
3672 else if (x == virtual_outgoing_args_rtx)
3673 new = stack_pointer_rtx, offset = out_arg_offset;
3674 else if (x == virtual_cfa_rtx)
3675 new = arg_pointer_rtx, offset = cfa_offset;
3676 else
3677 return 0;
3679 *poffset = offset;
3680 return new;
3683 /* Given a pointer to a piece of rtx and an optional pointer to the
3684 containing object, instantiate any virtual registers present in it.
3686 If EXTRA_INSNS, we always do the replacement and generate
3687 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3688 is not valid.
3690 Return 1 if we either had nothing to do or if we were able to do the
3691 needed replacement. Return 0 otherwise; we only return zero if
3692 EXTRA_INSNS is zero.
3694 We first try some simple transformations to avoid the creation of extra
3695 pseudos. */
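/* A typical simple transformation (illustration only):
   (plus (reg virtual-stack-vars) (const_int 8)) is rewritten in place
   to (plus (reg fp) (const_int var_offset + 8)), with no new pseudo
   needed as long as validate_change accepts the result.  */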
3697 static int
3698 instantiate_virtual_regs_1 (loc, object, extra_insns)
3699 rtx *loc;
3700 rtx object;
3701 int extra_insns;
3703 rtx x;
3704 RTX_CODE code;
3705 rtx new = 0;
3706 HOST_WIDE_INT offset = 0;
3707 rtx temp;
3708 rtx seq;
3709 int i, j;
3710 const char *fmt;
3712 /* Re-start here to avoid recursion in common cases. */
3713 restart:
3715 x = *loc;
3716 if (x == 0)
3717 return 1;
3719 code = GET_CODE (x);
3721 /* Check for some special cases. */
3722 switch (code)
3724 case CONST_INT:
3725 case CONST_DOUBLE:
3726 case CONST:
3727 case SYMBOL_REF:
3728 case CODE_LABEL:
3729 case PC:
3730 case CC0:
3731 case ASM_INPUT:
3732 case ADDR_VEC:
3733 case ADDR_DIFF_VEC:
3734 case RETURN:
3735 return 1;
3737 case SET:
3738 /* We are allowed to set the virtual registers. This means that
3739 the actual register should receive the source minus the
3740 appropriate offset. This is used, for example, in the handling
3741 of non-local gotos. */
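/* For instance, (set (reg virtual-stack-vars) (reg R)) is rewritten
   to set frame_pointer_rtx from R minus var_offset, with any insns
   needed to compute that sum emitted before OBJECT.  */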
3742 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3744 rtx src = SET_SRC (x);
3746 /* We are setting the register, not using it, so the relevant
3747 offset is the negative of the offset to use were we using
3748 the register. */
3749 offset = - offset;
3750 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3752 /* The only valid sources here are PLUS or REG. Just do
3753 the simplest possible thing to handle them. */
3754 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3755 abort ();
3757 start_sequence ();
3758 if (GET_CODE (src) != REG)
3759 temp = force_operand (src, NULL_RTX);
3760 else
3761 temp = src;
3762 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3763 seq = get_insns ();
3764 end_sequence ();
3766 emit_insns_before (seq, object);
3767 SET_DEST (x) = new;
3769 if (! validate_change (object, &SET_SRC (x), temp, 0)
3770 || ! extra_insns)
3771 abort ();
3773 return 1;
3776 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3777 loc = &SET_SRC (x);
3778 goto restart;
3780 case PLUS:
3781 /* Handle special case of virtual register plus constant. */
3782 if (CONSTANT_P (XEXP (x, 1)))
3784 rtx old, new_offset;
3786 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3787 if (GET_CODE (XEXP (x, 0)) == PLUS)
3789 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3791 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3792 extra_insns);
3793 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3795 else
3797 loc = &XEXP (x, 0);
3798 goto restart;
3802 #ifdef POINTERS_EXTEND_UNSIGNED
3803 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3804 we can commute the PLUS and SUBREG because pointers into the
3805 frame are well-behaved. */
3806 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3807 && GET_CODE (XEXP (x, 1)) == CONST_INT
3808 && 0 != (new
3809 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3810 &offset))
3811 && validate_change (object, loc,
3812 plus_constant (gen_lowpart (ptr_mode,
3813 new),
3814 offset
3815 + INTVAL (XEXP (x, 1))),
3817 return 1;
3818 #endif
3819 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3821 /* We know the second operand is a constant. Unless the
3822 first operand is a REG (which has already been checked),
3823 it needs to be checked. */
3824 if (GET_CODE (XEXP (x, 0)) != REG)
3826 loc = &XEXP (x, 0);
3827 goto restart;
3829 return 1;
3832 new_offset = plus_constant (XEXP (x, 1), offset);
3834 /* If the new constant is zero, try to replace the sum with just
3835 the register. */
3836 if (new_offset == const0_rtx
3837 && validate_change (object, loc, new, 0))
3838 return 1;
3840 /* Next try to replace the register and new offset.
3841 There are two changes to validate here and we can't assume that
3842 in the case of old offset equals new just changing the register
3843 will yield a valid insn. In the interests of a little efficiency,
3844 however, we only call validate change once (we don't queue up the
3845 changes and then call apply_change_group). */
3847 old = XEXP (x, 0);
3848 if (offset == 0
3849 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3850 : (XEXP (x, 0) = new,
3851 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3853 if (! extra_insns)
3855 XEXP (x, 0) = old;
3856 return 0;
3859 /* Otherwise copy the new constant into a register and replace
3860 constant with that register. */
3861 temp = gen_reg_rtx (Pmode);
3862 XEXP (x, 0) = new;
3863 if (validate_change (object, &XEXP (x, 1), temp, 0))
3864 emit_insn_before (gen_move_insn (temp, new_offset), object);
3865 else
3867 /* If that didn't work, replace this expression with a
3868 register containing the sum. */
3870 XEXP (x, 0) = old;
3871 new = gen_rtx_PLUS (Pmode, new, new_offset);
3873 start_sequence ();
3874 temp = force_operand (new, NULL_RTX);
3875 seq = get_insns ();
3876 end_sequence ();
3878 emit_insns_before (seq, object);
3879 if (! validate_change (object, loc, temp, 0)
3880 && ! validate_replace_rtx (x, temp, object))
3881 abort ();
3885 return 1;
3888 /* Fall through to generic two-operand expression case. */
3889 case EXPR_LIST:
3890 case CALL:
3891 case COMPARE:
3892 case MINUS:
3893 case MULT:
3894 case DIV: case UDIV:
3895 case MOD: case UMOD:
3896 case AND: case IOR: case XOR:
3897 case ROTATERT: case ROTATE:
3898 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3899 case NE: case EQ:
3900 case GE: case GT: case GEU: case GTU:
3901 case LE: case LT: case LEU: case LTU:
3902 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3903 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3904 loc = &XEXP (x, 0);
3905 goto restart;
3907 case MEM:
3908 /* Most cases of MEM that convert to valid addresses have already been
3909 handled by our scan of decls. The only special handling we
3910 need here is to make a copy of the rtx to ensure it isn't being
3911 shared if we have to change it to a pseudo.
3913 If the rtx is a simple reference to an address via a virtual register,
3914 it can potentially be shared. In such cases, first try to make it
3915 a valid address, which can also be shared. Otherwise, copy it and
3916 proceed normally.
3918 First check for common cases that need no processing. These are
3919 usually due to instantiation already being done on a previous instance
3920 of a shared rtx. */
3922 temp = XEXP (x, 0);
3923 if (CONSTANT_ADDRESS_P (temp)
3924 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3925 || temp == arg_pointer_rtx
3926 #endif
3927 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3928 || temp == hard_frame_pointer_rtx
3929 #endif
3930 || temp == frame_pointer_rtx)
3931 return 1;
3933 if (GET_CODE (temp) == PLUS
3934 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3935 && (XEXP (temp, 0) == frame_pointer_rtx
3936 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3937 || XEXP (temp, 0) == hard_frame_pointer_rtx
3938 #endif
3939 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3940 || XEXP (temp, 0) == arg_pointer_rtx
3941 #endif
3943 return 1;
3945 if (temp == virtual_stack_vars_rtx
3946 || temp == virtual_incoming_args_rtx
3947 || (GET_CODE (temp) == PLUS
3948 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3949 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3950 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3952 /* This MEM may be shared. If the substitution can be done without
3953 the need to generate new pseudos, we want to do it in place
3954 so all copies of the shared rtx benefit. The call below will
3955 only make substitutions if the resulting address is still
3956 valid.
3958 Note that we cannot pass X as the object in the recursive call
3959 since the insn being processed may not allow all valid
3960 addresses. However, if we were not passed an object, we can
3961 only modify X without copying it if X will have a valid
3962 address.
3964 ??? Also note that this can still lose if OBJECT is an insn that
3965 has fewer restrictions on an address than some other insn.
3966 In that case, we will modify the shared address. This case
3967 doesn't seem very likely, though. One case where this could
3968 happen is in the case of a USE or CLOBBER reference, but we
3969 take care of that below. */
3971 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3972 object ? object : x, 0))
3973 return 1;
3975 /* Otherwise make a copy and process that copy. We copy the entire
3976 RTL expression since it might be a PLUS which could also be
3977 shared. */
3978 *loc = x = copy_rtx (x);
3981 /* Fall through to generic unary operation case. */
3982 case SUBREG:
3983 case STRICT_LOW_PART:
3984 case NEG: case NOT:
3985 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3986 case SIGN_EXTEND: case ZERO_EXTEND:
3987 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3988 case FLOAT: case FIX:
3989 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3990 case ABS:
3991 case SQRT:
3992 case FFS:
3993 /* These cases either have just one operand or we know that we need not
3994 check the rest of the operands. */
3995 loc = &XEXP (x, 0);
3996 goto restart;
3998 case USE:
3999 case CLOBBER:
4000 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4001 go ahead and make the invalid one, but do it to a copy. For a REG,
4002 just make the recursive call, since there's no chance of a problem. */
4004 if ((GET_CODE (XEXP (x, 0)) == MEM
4005 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4007 || (GET_CODE (XEXP (x, 0)) == REG
4008 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4009 return 1;
4011 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4012 loc = &XEXP (x, 0);
4013 goto restart;
4015 case REG:
4016 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4017 in front of this insn and substitute the temporary. */
4018 if ((new = instantiate_new_reg (x, &offset)) != 0)
4020 temp = plus_constant (new, offset);
4021 if (!validate_change (object, loc, temp, 0))
4023 if (! extra_insns)
4024 return 0;
4026 start_sequence ();
4027 temp = force_operand (temp, NULL_RTX);
4028 seq = get_insns ();
4029 end_sequence ();
4031 emit_insns_before (seq, object);
4032 if (! validate_change (object, loc, temp, 0)
4033 && ! validate_replace_rtx (x, temp, object))
4034 abort ();
4038 return 1;
4040 case ADDRESSOF:
4041 if (GET_CODE (XEXP (x, 0)) == REG)
4042 return 1;
4044 else if (GET_CODE (XEXP (x, 0)) == MEM)
4046 /* If we have a (addressof (mem ..)), do any instantiation inside
4047 since we know we'll be making the inside valid when we finally
4048 remove the ADDRESSOF. */
4049 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4050 return 1;
4052 break;
4054 default:
4055 break;
4058 /* Scan all subexpressions. */
4059 fmt = GET_RTX_FORMAT (code);
4060 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4061 if (*fmt == 'e')
4063 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4064 return 0;
4066 else if (*fmt == 'E')
4067 for (j = 0; j < XVECLEN (x, i); j++)
4068 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4069 extra_insns))
4070 return 0;
4072 return 1;
4075 /* Optimization: assuming this function does not receive nonlocal gotos,
4076 delete the handlers for such, as well as the insns to establish
4077 and disestablish them. */
4079 static void
4080 delete_handlers ()
4082 rtx insn;
4083 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4085 /* Delete the handler by turning off the flag that would
4086 prevent jump_optimize from deleting it.
4087 Also permit deletion of the nonlocal labels themselves
4088 if nothing local refers to them. */
4089 if (GET_CODE (insn) == CODE_LABEL)
4091 tree t, last_t;
4093 LABEL_PRESERVE_P (insn) = 0;
4095 /* Remove it from the nonlocal_label list, to avoid confusing
4096 flow. */
4097 for (t = nonlocal_labels, last_t = 0; t;
4098 last_t = t, t = TREE_CHAIN (t))
4099 if (DECL_RTL (TREE_VALUE (t)) == insn)
4100 break;
4101 if (t)
4103 if (! last_t)
4104 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4105 else
4106 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4109 if (GET_CODE (insn) == INSN)
4111 int can_delete = 0;
4112 rtx t;
4113 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4114 if (reg_mentioned_p (t, PATTERN (insn)))
4116 can_delete = 1;
4117 break;
4119 if (can_delete
4120 || (nonlocal_goto_stack_level != 0
4121 && reg_mentioned_p (nonlocal_goto_stack_level,
4122 PATTERN (insn))))
4123 delete_related_insns (insn);
4129 max_parm_reg_num ()
4131 return max_parm_reg;
4134 /* Return the first insn following those generated by `assign_parms'. */
4137 get_first_nonparm_insn ()
4139 if (last_parm_insn)
4140 return NEXT_INSN (last_parm_insn);
4141 return get_insns ();
4144 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4145 Crash if there is none. */
4148 get_first_block_beg ()
4150 rtx searcher;
4151 rtx insn = get_first_nonparm_insn ();
4153 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4154 if (GET_CODE (searcher) == NOTE
4155 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4156 return searcher;
4158 abort (); /* Invalid call to this function. (See comments above.) */
4159 return NULL_RTX;
4162 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4163 This means a type for which function calls must pass an address to the
4164 function or get an address back from the function.
4165 EXP may be a type node or an expression (whose type is tested). */
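/* For example, on a typical target a struct type that is
   TREE_ADDRESSABLE, or for which RETURN_IN_MEMORY is true, makes this
   return 1, while a plain int returned in a call-clobbered register
   makes it return 0.  */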
4168 aggregate_value_p (exp)
4169 tree exp;
4171 int i, regno, nregs;
4172 rtx reg;
4174 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4176 if (TREE_CODE (type) == VOID_TYPE)
4177 return 0;
4178 if (RETURN_IN_MEMORY (type))
4179 return 1;
4180 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4181 and thus can't be returned in registers. */
4182 if (TREE_ADDRESSABLE (type))
4183 return 1;
4184 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4185 return 1;
4186 /* Make sure we have suitable call-clobbered regs to return
4187 the value in; if not, we must return it in memory. */
4188 reg = hard_function_value (type, 0, 0);
4190 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4191 it is OK. */
4192 if (GET_CODE (reg) != REG)
4193 return 0;
4195 regno = REGNO (reg);
4196 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4197 for (i = 0; i < nregs; i++)
4198 if (! call_used_regs[regno + i])
4199 return 1;
4200 return 0;
4203 /* Assign RTL expressions to the function's parameters.
4204 This may involve copying them into registers and using
4205 those registers as the RTL for them. */
4207 void
4208 assign_parms (fndecl)
4209 tree fndecl;
4211 tree parm;
4212 rtx entry_parm = 0;
4213 rtx stack_parm = 0;
4214 CUMULATIVE_ARGS args_so_far;
4215 enum machine_mode promoted_mode, passed_mode;
4216 enum machine_mode nominal_mode, promoted_nominal_mode;
4217 int unsignedp;
4218 /* Total space needed so far for args on the stack,
4219 given as a constant and a tree-expression. */
4220 struct args_size stack_args_size;
4221 tree fntype = TREE_TYPE (fndecl);
4222 tree fnargs = DECL_ARGUMENTS (fndecl);
4223 /* This is used for the arg pointer when referring to stack args. */
4224 rtx internal_arg_pointer;
4225 /* This is a dummy PARM_DECL that we use for the function result if
4226 the function returns a structure. */
4227 tree function_result_decl = 0;
4228 #ifdef SETUP_INCOMING_VARARGS
4229 int varargs_setup = 0;
4230 #endif
4231 rtx conversion_insns = 0;
4232 struct args_size alignment_pad;
4234 /* Nonzero if the last arg is named `__builtin_va_alist',
4235 which is used on some machines for old-fashioned non-ANSI varargs.h;
4236 this should be stuck onto the stack as if it had arrived there. */
4237 int hide_last_arg
4238 = (current_function_varargs
4239 && fnargs
4240 && (parm = tree_last (fnargs)) != 0
4241 && DECL_NAME (parm)
4242 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4243 "__builtin_va_alist")));
4245 /* Nonzero if function takes extra anonymous args.
4246 This means the last named arg must be on the stack
4247 right before the anonymous ones. */
4248 int stdarg
4249 = (TYPE_ARG_TYPES (fntype) != 0
4250 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4251 != void_type_node));
4253 current_function_stdarg = stdarg;
4255 /* If the reg that the virtual arg pointer will be translated into is
4256 not a fixed reg or is the stack pointer, make a copy of the virtual
4257 arg pointer, and address parms via the copy. The frame pointer is
4258 considered fixed even though it is not marked as such.
4260 The second time through, simply use ap to avoid generating rtx. */
4262 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4263 || ! (fixed_regs[ARG_POINTER_REGNUM]
4264 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4265 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4266 else
4267 internal_arg_pointer = virtual_incoming_args_rtx;
4268 current_function_internal_arg_pointer = internal_arg_pointer;
4270 stack_args_size.constant = 0;
4271 stack_args_size.var = 0;
4273 /* If struct value address is treated as the first argument, make it so. */
4274 if (aggregate_value_p (DECL_RESULT (fndecl))
4275 && ! current_function_returns_pcc_struct
4276 && struct_value_incoming_rtx == 0)
4278 tree type = build_pointer_type (TREE_TYPE (fntype));
4280 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4282 DECL_ARG_TYPE (function_result_decl) = type;
4283 TREE_CHAIN (function_result_decl) = fnargs;
4284 fnargs = function_result_decl;
4287 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4288 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4290 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4291 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4292 #else
4293 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4294 #endif
4296 /* We haven't yet found an argument that we must push and pretend the
4297 caller did. */
4298 current_function_pretend_args_size = 0;
4300 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4302 struct args_size stack_offset;
4303 struct args_size arg_size;
4304 int passed_pointer = 0;
4305 int did_conversion = 0;
4306 tree passed_type = DECL_ARG_TYPE (parm);
4307 tree nominal_type = TREE_TYPE (parm);
4308 int pretend_named;
4310 /* Set LAST_NAMED if this is last named arg before some
4311 anonymous args. */
4312 int last_named = ((TREE_CHAIN (parm) == 0
4313 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4314 && (stdarg || current_function_varargs));
4315 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4316 most machines, if this is a varargs/stdarg function, then we treat
4317 the last named arg as if it were anonymous too. */
4318 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4320 if (TREE_TYPE (parm) == error_mark_node
4321 /* This can happen after weird syntax errors
4322 or if an enum type is defined among the parms. */
4323 || TREE_CODE (parm) != PARM_DECL
4324 || passed_type == NULL)
4326 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4327 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4328 TREE_USED (parm) = 1;
4329 continue;
4332 /* For a varargs.h function, save info about regs and stack space
4333 used by the individual args, not including the va_alist arg. */
4334 if (hide_last_arg && last_named)
4335 current_function_args_info = args_so_far;
4337 /* Find mode of arg as it is passed, and mode of arg
4338 as it should be during execution of this function. */
4339 passed_mode = TYPE_MODE (passed_type);
4340 nominal_mode = TYPE_MODE (nominal_type);
4342 /* If the parm's mode is VOID, its value doesn't matter,
4343 so we avoid the usual things like emit_move_insn that could crash. */
4344 if (nominal_mode == VOIDmode)
4346 SET_DECL_RTL (parm, const0_rtx);
4347 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4348 continue;
4351 /* If the parm is to be passed as a transparent union, use the
4352 type of the first field for the tests below. We have already
4353 verified that the modes are the same. */
4354 if (DECL_TRANSPARENT_UNION (parm)
4355 || (TREE_CODE (passed_type) == UNION_TYPE
4356 && TYPE_TRANSPARENT_UNION (passed_type)))
4357 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4359 /* See if this arg was passed by invisible reference. It is if
4360 it is an object whose size depends on the contents of the
4361 object itself or if the machine requires these objects be passed
4362 that way. */
4364 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4365 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4366 || TREE_ADDRESSABLE (passed_type)
4367 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4368 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4369 passed_type, named_arg)
4370 #endif
4373 passed_type = nominal_type = build_pointer_type (passed_type);
4374 passed_pointer = 1;
4375 passed_mode = nominal_mode = Pmode;
4378 promoted_mode = passed_mode;
4380 #ifdef PROMOTE_FUNCTION_ARGS
4381 /* Compute the mode in which the arg is actually extended to. */
4382 unsignedp = TREE_UNSIGNED (passed_type);
4383 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4384 #endif
4386 /* Let machine desc say which reg (if any) the parm arrives in.
4387 0 means it arrives on the stack. */
4388 #ifdef FUNCTION_INCOMING_ARG
4389 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4390 passed_type, named_arg);
4391 #else
4392 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4393 passed_type, named_arg);
4394 #endif
4396 if (entry_parm == 0)
4397 promoted_mode = passed_mode;
4399 #ifdef SETUP_INCOMING_VARARGS
4400 /* If this is the last named parameter, do any required setup for
4401 varargs or stdargs. We need to know about the case of this being an
4402 addressable type, in which case we skip the registers it
4403 would have arrived in.
4405 For stdargs, LAST_NAMED will be set for two parameters, the one that
4406 is actually the last named, and the dummy parameter. We only
4407 want to do this action once.
4409 Also, indicate when RTL generation is to be suppressed. */
4410 if (last_named && !varargs_setup)
4412 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4413 current_function_pretend_args_size, 0);
4414 varargs_setup = 1;
4416 #endif
4418 /* Determine parm's home in the stack,
4419 in case it arrives in the stack or we should pretend it did.
4421 Compute the stack position and rtx where the argument arrives
4422 and its size.
4424 There is one complexity here: If this was a parameter that would
4425 have been passed in registers, but wasn't only because it is
4426 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4427 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4428 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4429 0 as it was the previous time. */
4431 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4432 locate_and_pad_parm (promoted_mode, passed_type,
4433 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4435 #else
4436 #ifdef FUNCTION_INCOMING_ARG
4437 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4438 passed_type,
4439 pretend_named) != 0,
4440 #else
4441 FUNCTION_ARG (args_so_far, promoted_mode,
4442 passed_type,
4443 pretend_named) != 0,
4444 #endif
4445 #endif
4446 fndecl, &stack_args_size, &stack_offset, &arg_size,
4447 &alignment_pad);
4450 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4452 if (offset_rtx == const0_rtx)
4453 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4454 else
4455 stack_parm = gen_rtx_MEM (promoted_mode,
4456 gen_rtx_PLUS (Pmode,
4457 internal_arg_pointer,
4458 offset_rtx));
4460 set_mem_attributes (stack_parm, parm, 1);
4463 /* If this parameter was passed both in registers and in the stack,
4464 use the copy on the stack. */
4465 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4466 entry_parm = 0;
4468 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4469 /* If this parm was passed part in regs and part in memory,
4470 pretend it arrived entirely in memory
4471 by pushing the register-part onto the stack.
4473 In the special case of a DImode or DFmode that is split,
4474 we could put it together in a pseudoreg directly,
4475 but for now that's not worth bothering with. */
4477 if (entry_parm)
4479 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4480 passed_type, named_arg);
4482 if (nregs > 0)
4484 current_function_pretend_args_size
4485 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4486 / (PARM_BOUNDARY / BITS_PER_UNIT)
4487 * (PARM_BOUNDARY / BITS_PER_UNIT));
4489 /* Handle calls that pass values in multiple non-contiguous
4490 locations. The Irix 6 ABI has examples of this. */
4491 if (GET_CODE (entry_parm) == PARALLEL)
4492 emit_group_store (validize_mem (stack_parm), entry_parm,
4493 int_size_in_bytes (TREE_TYPE (parm)));
4495 else
4496 move_block_from_reg (REGNO (entry_parm),
4497 validize_mem (stack_parm), nregs,
4498 int_size_in_bytes (TREE_TYPE (parm)));
4500 entry_parm = stack_parm;
4503 #endif
4505 /* If we didn't decide this parm came in a register,
4506 by default it came on the stack. */
4507 if (entry_parm == 0)
4508 entry_parm = stack_parm;
4510 /* Record permanently how this parm was passed. */
4511 DECL_INCOMING_RTL (parm) = entry_parm;
4513 /* If there is actually space on the stack for this parm,
4514 count it in stack_args_size; otherwise set stack_parm to 0
4515 to indicate there is no preallocated stack slot for the parm. */
4517 if (entry_parm == stack_parm
4518 || (GET_CODE (entry_parm) == PARALLEL
4519 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4520 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4521 /* On some machines, even if a parm value arrives in a register
4522 there is still an (uninitialized) stack slot allocated for it.
4524 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4525 whether this parameter already has a stack slot allocated,
4526 because an arg block exists only if current_function_args_size
4527 is larger than some threshold, and we haven't calculated that
4528 yet. So, for now, we just assume that stack slots never exist
4529 in this case. */
4530 || REG_PARM_STACK_SPACE (fndecl) > 0
4531 #endif
4534 stack_args_size.constant += arg_size.constant;
4535 if (arg_size.var)
4536 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4538 else
4539 /* No stack slot was pushed for this parm. */
4540 stack_parm = 0;
4542 /* Update info on where next arg arrives in registers. */
4544 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4545 passed_type, named_arg);
4547 /* If we can't trust the parm stack slot to be aligned enough
4548 for its ultimate type, don't use that slot after entry.
4549 We'll make another stack slot, if we need one. */
4551 unsigned int thisparm_boundary
4552 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4554 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4555 stack_parm = 0;
4558 /* If parm was passed in memory, and we need to convert it on entry,
4559 don't store it back in that same slot. */
4560 if (entry_parm != 0
4561 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4562 stack_parm = 0;
4564 /* When an argument is passed in multiple locations, we can't
4565 make use of this information, but we can save some copying if
4566 the whole argument is passed in a single register. */
4567 if (GET_CODE (entry_parm) == PARALLEL
4568 && nominal_mode != BLKmode && passed_mode != BLKmode)
4570 int i, len = XVECLEN (entry_parm, 0);
4572 for (i = 0; i < len; i++)
4573 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4574 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4575 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4576 == passed_mode)
4577 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4579 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4580 DECL_INCOMING_RTL (parm) = entry_parm;
4581 break;
4585 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4586 in the mode in which it arrives.
4587 STACK_PARM is an RTX for a stack slot where the parameter can live
4588 during the function (in case we want to put it there).
4589 STACK_PARM is 0 if no stack slot was pushed for it.
4591 Now output code if necessary to convert ENTRY_PARM to
4592 the type in which this function declares it,
4593 and store that result in an appropriate place,
4594 which may be a pseudo reg, may be STACK_PARM,
4595 or may be a local stack slot if STACK_PARM is 0.
4597 Set DECL_RTL to that place. */
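/* For instance, a SImode parm arriving in a hard register is normally
   copied into a pseudo that becomes its DECL_RTL, while a BLKmode parm
   arriving in registers is stored into STACK_PARM (allocating a stack
   slot if none was pushed) and that MEM becomes its DECL_RTL.  */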
4599 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4601 /* If a BLKmode arrives in registers, copy it to a stack slot.
4602 Handle calls that pass values in multiple non-contiguous
4603 locations. The Irix 6 ABI has examples of this. */
4604 if (GET_CODE (entry_parm) == REG
4605 || GET_CODE (entry_parm) == PARALLEL)
4607 int size_stored
4608 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4609 UNITS_PER_WORD);
4611 /* Note that we will be storing an integral number of words.
4612 So we have to be careful to ensure that we allocate an
4613 integral number of words. We do this below in the
4614 assign_stack_local if space was not allocated in the argument
4615 list. If it was, this will not work if PARM_BOUNDARY is not
4616 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4617 if it becomes a problem. */
4619 if (stack_parm == 0)
4621 stack_parm
4622 = assign_stack_local (GET_MODE (entry_parm),
4623 size_stored, 0);
4624 set_mem_attributes (stack_parm, parm, 1);
4627 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4628 abort ();
4630 /* Handle calls that pass values in multiple non-contiguous
4631 locations. The Irix 6 ABI has examples of this. */
4632 if (GET_CODE (entry_parm) == PARALLEL)
4633 emit_group_store (validize_mem (stack_parm), entry_parm,
4634 int_size_in_bytes (TREE_TYPE (parm)));
4635 else
4636 move_block_from_reg (REGNO (entry_parm),
4637 validize_mem (stack_parm),
4638 size_stored / UNITS_PER_WORD,
4639 int_size_in_bytes (TREE_TYPE (parm)));
4641 SET_DECL_RTL (parm, stack_parm);
4643 else if (! ((! optimize
4644 && ! DECL_REGISTER (parm)
4645 && ! DECL_INLINE (fndecl))
4646 || TREE_SIDE_EFFECTS (parm)
4647 /* If -ffloat-store specified, don't put explicit
4648 float variables into registers. */
4649 || (flag_float_store
4650 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4651 /* Always assign pseudo to structure return or item passed
4652 by invisible reference. */
4653 || passed_pointer || parm == function_result_decl)
4655 /* Store the parm in a pseudoregister during the function, but we
4656 may need to do it in a wider mode. */
4658 rtx parmreg;
4659 unsigned int regno, regnoi = 0, regnor = 0;
4661 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4663 promoted_nominal_mode
4664 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4666 parmreg = gen_reg_rtx (promoted_nominal_mode);
4667 mark_user_reg (parmreg);
4669 /* If this was an item that we received a pointer to, set DECL_RTL
4670 appropriately. */
4671 if (passed_pointer)
4673 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4674 parmreg);
4675 set_mem_attributes (x, parm, 1);
4676 SET_DECL_RTL (parm, x);
4678 else
4680 SET_DECL_RTL (parm, parmreg);
4681 maybe_set_unchanging (DECL_RTL (parm), parm);
4684 /* Copy the value into the register. */
4685 if (nominal_mode != passed_mode
4686 || promoted_nominal_mode != promoted_mode)
4688 int save_tree_used;
4689 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4690 mode, by the caller. We now have to convert it to
4691 NOMINAL_MODE, if different. However, PARMREG may be in
4692 a different mode than NOMINAL_MODE if it is being stored
4693 promoted.
4695 If ENTRY_PARM is a hard register, it might be in a register
4696 not valid for operating in its mode (e.g., an odd-numbered
4697 register for a DFmode). In that case, moves are the only
4698 thing valid, so we can't do a convert from there. This
4699 occurs when the calling sequence allows such misaligned
4700 usages.
4702 In addition, the conversion may involve a call, which could
4703 clobber parameters which haven't been copied to pseudo
4704 registers yet. Therefore, we must first copy the parm to
4705 a pseudo reg here, and save the conversion until after all
4706 parameters have been moved. */
4708 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4710 emit_move_insn (tempreg, validize_mem (entry_parm));
4712 push_to_sequence (conversion_insns);
4713 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4715 if (GET_CODE (tempreg) == SUBREG
4716 && GET_MODE (tempreg) == nominal_mode
4717 && GET_CODE (SUBREG_REG (tempreg)) == REG
4718 && nominal_mode == passed_mode
4719 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4720 && GET_MODE_SIZE (GET_MODE (tempreg))
4721 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4723 /* The argument is already sign/zero extended, so note it
4724 into the subreg. */
4725 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4726 SUBREG_PROMOTED_UNSIGNED_P (tempreg) = unsignedp;
4729 /* TREE_USED gets set erroneously during expand_assignment. */
4730 save_tree_used = TREE_USED (parm);
4731 expand_assignment (parm,
4732 make_tree (nominal_type, tempreg), 0, 0);
4733 TREE_USED (parm) = save_tree_used;
4734 conversion_insns = get_insns ();
4735 did_conversion = 1;
4736 end_sequence ();
4738 else
4739 emit_move_insn (parmreg, validize_mem (entry_parm));
4741 /* If we were passed a pointer but the actual value
4742 can safely live in a register, put it in one. */
4743 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4744 && ! ((! optimize
4745 && ! DECL_REGISTER (parm)
4746 && ! DECL_INLINE (fndecl))
4747 || TREE_SIDE_EFFECTS (parm)
4748 /* If -ffloat-store specified, don't put explicit
4749 float variables into registers. */
4750 || (flag_float_store
4751 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4753 /* We can't use nominal_mode, because it will have been set to
4754 Pmode above. We must use the actual mode of the parm. */
4755 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4756 mark_user_reg (parmreg);
4757 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4759 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4760 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4761 push_to_sequence (conversion_insns);
4762 emit_move_insn (tempreg, DECL_RTL (parm));
4763 SET_DECL_RTL (parm,
4764 convert_to_mode (GET_MODE (parmreg),
4765 tempreg,
4766 unsigned_p));
4767 emit_move_insn (parmreg, DECL_RTL (parm));
4768 conversion_insns = get_insns();
4769 did_conversion = 1;
4770 end_sequence ();
4772 else
4773 emit_move_insn (parmreg, DECL_RTL (parm));
4774 SET_DECL_RTL (parm, parmreg);
4775 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4776 now the parm. */
4777 stack_parm = 0;
4779 #ifdef FUNCTION_ARG_CALLEE_COPIES
4780 /* If we are passed an arg by reference and it is our responsibility
4781 to make a copy, do it now.
4782 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4783 original argument, so we must recreate them in the call to
4784 FUNCTION_ARG_CALLEE_COPIES. */
4785 /* ??? Later add code to skip the copy when the argument isn't
4786 modified. */
4788 else if (passed_pointer
4789 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4790 TYPE_MODE (DECL_ARG_TYPE (parm)),
4791 DECL_ARG_TYPE (parm),
4792 named_arg)
4793 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4795 rtx copy;
4796 tree type = DECL_ARG_TYPE (parm);
4798 /* This sequence may involve a library call perhaps clobbering
4799 registers that haven't been copied to pseudos yet. */
4801 push_to_sequence (conversion_insns);
4803 if (!COMPLETE_TYPE_P (type)
4804 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4805 /* This is a variable sized object. */
4806 copy = gen_rtx_MEM (BLKmode,
4807 allocate_dynamic_stack_space
4808 (expr_size (parm), NULL_RTX,
4809 TYPE_ALIGN (type)));
4810 else
4811 copy = assign_stack_temp (TYPE_MODE (type),
4812 int_size_in_bytes (type), 1);
4813 set_mem_attributes (copy, parm, 1);
4815 store_expr (parm, copy, 0);
4816 emit_move_insn (parmreg, XEXP (copy, 0));
4817 if (current_function_check_memory_usage)
4818 emit_library_call (chkr_set_right_libfunc,
4819 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4820 XEXP (copy, 0), Pmode,
4821 GEN_INT (int_size_in_bytes (type)),
4822 TYPE_MODE (sizetype),
4823 GEN_INT (MEMORY_USE_RW),
4824 TYPE_MODE (integer_type_node));
4825 conversion_insns = get_insns ();
4826 did_conversion = 1;
4827 end_sequence ();
4829 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4831 /* In any case, record the parm's desired stack location
4832 in case we later discover it must live in the stack.
4834 If it is a COMPLEX value, store the stack location for both
4835 halves. */
4837 if (GET_CODE (parmreg) == CONCAT)
4838 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4839 else
4840 regno = REGNO (parmreg);
4842 if (regno >= max_parm_reg)
4844 rtx *new;
4845 int old_max_parm_reg = max_parm_reg;
4847 /* It's slow to expand this one register at a time,
4848 but it's also rare and we need max_parm_reg to be
4849 precisely correct. */
4850 max_parm_reg = regno + 1;
4851 new = (rtx *) xrealloc (parm_reg_stack_loc,
4852 max_parm_reg * sizeof (rtx));
4853 memset ((char *) (new + old_max_parm_reg), 0,
4854 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4855 parm_reg_stack_loc = new;
4858 if (GET_CODE (parmreg) == CONCAT)
4860 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4862 regnor = REGNO (gen_realpart (submode, parmreg));
4863 regnoi = REGNO (gen_imagpart (submode, parmreg));
4865 if (stack_parm != 0)
4867 parm_reg_stack_loc[regnor]
4868 = gen_realpart (submode, stack_parm);
4869 parm_reg_stack_loc[regnoi]
4870 = gen_imagpart (submode, stack_parm);
4872 else
4874 parm_reg_stack_loc[regnor] = 0;
4875 parm_reg_stack_loc[regnoi] = 0;
4878 else
4879 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4881 /* Mark the register as eliminable if we did no conversion
4882 and it was copied from memory at a fixed offset,
4883 and the arg pointer was not copied to a pseudo-reg.
4884 If the arg pointer is a pseudo reg or the offset formed
4885 an invalid address, such memory-equivalences
4886 as we make here would screw up life analysis for it. */
4887 if (nominal_mode == passed_mode
4888 && ! did_conversion
4889 && stack_parm != 0
4890 && GET_CODE (stack_parm) == MEM
4891 && stack_offset.var == 0
4892 && reg_mentioned_p (virtual_incoming_args_rtx,
4893 XEXP (stack_parm, 0)))
4895 rtx linsn = get_last_insn ();
4896 rtx sinsn, set;
4898 /* Mark complex types separately. */
4899 if (GET_CODE (parmreg) == CONCAT)
4900 /* Scan backwards for the set of the real and
4901 imaginary parts. */
4902 for (sinsn = linsn; sinsn != 0;
4903 sinsn = prev_nonnote_insn (sinsn))
4905 set = single_set (sinsn);
4906 if (set != 0
4907 && SET_DEST (set) == regno_reg_rtx [regnoi])
4908 REG_NOTES (sinsn)
4909 = gen_rtx_EXPR_LIST (REG_EQUIV,
4910 parm_reg_stack_loc[regnoi],
4911 REG_NOTES (sinsn));
4912 else if (set != 0
4913 && SET_DEST (set) == regno_reg_rtx [regnor])
4914 REG_NOTES (sinsn)
4915 = gen_rtx_EXPR_LIST (REG_EQUIV,
4916 parm_reg_stack_loc[regnor],
4917 REG_NOTES (sinsn));
4919 else if ((set = single_set (linsn)) != 0
4920 && SET_DEST (set) == parmreg)
4921 REG_NOTES (linsn)
4922 = gen_rtx_EXPR_LIST (REG_EQUIV,
4923 stack_parm, REG_NOTES (linsn));
4926 /* For pointer data type, suggest pointer register. */
4927 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4928 mark_reg_pointer (parmreg,
4929 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4931 /* If something wants our address, try to use ADDRESSOF. */
4932 if (TREE_ADDRESSABLE (parm))
4934 /* If we end up putting something into the stack,
4935 fixup_var_refs_insns will need to make a pass over
4936 all the instructions. It looks through the pending
4937 sequences -- but it can't see the ones in the
4938 CONVERSION_INSNS, if they're not on the sequence
4939 stack. So, we go back to that sequence, just so that
4940 the fixups will happen. */
4941 push_to_sequence (conversion_insns);
4942 put_var_into_stack (parm);
4943 conversion_insns = get_insns ();
4944 end_sequence ();
4947 else
4949 /* Value must be stored in the stack slot STACK_PARM
4950 during function execution. */
4952 if (promoted_mode != nominal_mode)
4954 /* Conversion is required. */
4955 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4957 emit_move_insn (tempreg, validize_mem (entry_parm));
4959 push_to_sequence (conversion_insns);
4960 entry_parm = convert_to_mode (nominal_mode, tempreg,
4961 TREE_UNSIGNED (TREE_TYPE (parm)));
4962 if (stack_parm)
4963 /* ??? This may need a big-endian conversion on sparc64. */
4964 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
4966 conversion_insns = get_insns ();
4967 did_conversion = 1;
4968 end_sequence ();
4971 if (entry_parm != stack_parm)
4973 if (stack_parm == 0)
4975 stack_parm
4976 = assign_stack_local (GET_MODE (entry_parm),
4977 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4978 set_mem_attributes (stack_parm, parm, 1);
4981 if (promoted_mode != nominal_mode)
4983 push_to_sequence (conversion_insns);
4984 emit_move_insn (validize_mem (stack_parm),
4985 validize_mem (entry_parm));
4986 conversion_insns = get_insns ();
4987 end_sequence ();
4989 else
4990 emit_move_insn (validize_mem (stack_parm),
4991 validize_mem (entry_parm));
4993 if (current_function_check_memory_usage)
4995 push_to_sequence (conversion_insns);
4996 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
4997 VOIDmode, 3, XEXP (stack_parm, 0), Pmode,
4998 GEN_INT (GET_MODE_SIZE (GET_MODE
4999 (entry_parm))),
5000 TYPE_MODE (sizetype),
5001 GEN_INT (MEMORY_USE_RW),
5002 TYPE_MODE (integer_type_node));
5004 conversion_insns = get_insns ();
5005 end_sequence ();
5007 SET_DECL_RTL (parm, stack_parm);
5010 /* If this "parameter" was the place where we are receiving the
5011 function's incoming structure pointer, set up the result. */
5012 if (parm == function_result_decl)
5014 tree result = DECL_RESULT (fndecl);
5015 rtx addr = DECL_RTL (parm);
5016 rtx x;
5018 #ifdef POINTERS_EXTEND_UNSIGNED
5019 if (GET_MODE (addr) != Pmode)
5020 addr = convert_memory_address (Pmode, addr);
5021 #endif
5023 x = gen_rtx_MEM (DECL_MODE (result), addr);
5024 set_mem_attributes (x, result, 1);
5025 SET_DECL_RTL (result, x);
5028 if (GET_CODE (DECL_RTL (parm)) == REG)
5029 REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5030 else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5032 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5033 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5038 /* Output all parameter conversion instructions (possibly including calls)
5039 now that all parameters have been copied out of hard registers. */
5040 emit_insns (conversion_insns);
5042 last_parm_insn = get_last_insn ();
5044 current_function_args_size = stack_args_size.constant;
5046 /* Adjust function incoming argument size for alignment and
5047 minimum length. */
5049 #ifdef REG_PARM_STACK_SPACE
5050 #ifndef MAYBE_REG_PARM_STACK_SPACE
5051 current_function_args_size = MAX (current_function_args_size,
5052 REG_PARM_STACK_SPACE (fndecl));
5053 #endif
5054 #endif
5056 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5058 current_function_args_size
5059 = ((current_function_args_size + STACK_BYTES - 1)
5060 / STACK_BYTES) * STACK_BYTES;
5062 #ifdef ARGS_GROW_DOWNWARD
5063 current_function_arg_offset_rtx
5064 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5065 : expand_expr (size_diffop (stack_args_size.var,
5066 size_int (-stack_args_size.constant)),
5067 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5068 #else
5069 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5070 #endif
5072 /* See how many bytes, if any, of its args a function should try to pop
5073 on return. */
5075 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5076 current_function_args_size);
5078 /* For a stdarg.h function, save info about
5079 regs and stack space used by the named args. */
5081 if (!hide_last_arg)
5082 current_function_args_info = args_so_far;
5084 /* Set the rtx used for the function return value. Put this in its
5085 own variable so any optimizers that need this information don't have
5086 to include tree.h. Do this here so it gets done when an inlined
5087 function gets output. */
5089 current_function_return_rtx
5090 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5091 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5094 /* Indicate whether REGNO is an incoming argument to the current function
5095 that was promoted to a wider mode. If so, return the RTX for the
5096 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5097 that REGNO is promoted from and whether the promotion was signed or
5098 unsigned. */
5100 #ifdef PROMOTE_FUNCTION_ARGS
5103 promoted_input_arg (regno, pmode, punsignedp)
5104 unsigned int regno;
5105 enum machine_mode *pmode;
5106 int *punsignedp;
5108 tree arg;
5110 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5111 arg = TREE_CHAIN (arg))
5112 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5113 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5114 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5116 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5117 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5119 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5120 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5121 && mode != DECL_MODE (arg))
5123 *pmode = DECL_MODE (arg);
5124 *punsignedp = unsignedp;
5125 return DECL_INCOMING_RTL (arg);
5129 return 0;
5132 #endif
5134 /* Compute the size and offset from the start of the stacked arguments for a
5135 parm passed in mode PASSED_MODE and with type TYPE.
5137 INITIAL_OFFSET_PTR points to the current offset into the stacked
5138 arguments.
5140 The starting offset and size for this parm are returned in *OFFSET_PTR
5141 and *ARG_SIZE_PTR, respectively.
5143 IN_REGS is non-zero if the argument will be passed in registers. It will
5144 never be set if REG_PARM_STACK_SPACE is not defined.
5146 FNDECL is the function in which the argument was defined.
5148 There are two types of rounding that are done. The first, controlled by
5149 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5150 list to be aligned to the specific boundary (in bits). This rounding
5151 affects the initial and starting offsets, but not the argument size.
5153 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5154 optionally rounds the size of the parm to PARM_BOUNDARY. The
5155 initial offset is not affected by this rounding, while the size always
5156 is and the starting offset may be. */
5158 /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
5159 initial_offset_ptr is positive because locate_and_pad_parm's
5160 callers pass in the total size of args so far as
5161 initial_offset_ptr. arg_size_ptr is always positive. */
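/* A worked example, assuming args grow upward with PARM_BOUNDARY of 32
   bits and FUNCTION_ARG_BOUNDARY of 64 bits: with 20 bytes of args so
   far, the starting offset is first rounded up to 24, and a 6-byte
   parm padded upward has its size rounded up to 8, so *OFFSET_PTR gets
   24 and *ARG_SIZE_PTR gets 8.  */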
5163 void
5164 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5165 initial_offset_ptr, offset_ptr, arg_size_ptr,
5166 alignment_pad)
5167 enum machine_mode passed_mode;
5168 tree type;
5169 int in_regs ATTRIBUTE_UNUSED;
5170 tree fndecl ATTRIBUTE_UNUSED;
5171 struct args_size *initial_offset_ptr;
5172 struct args_size *offset_ptr;
5173 struct args_size *arg_size_ptr;
5174 struct args_size *alignment_pad;
5177 tree sizetree
5178 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5179 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5180 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5182 #ifdef REG_PARM_STACK_SPACE
5183 /* If we have found a stack parm before we reach the end of the
5184 area reserved for registers, skip that area. */
5185 if (! in_regs)
5187 int reg_parm_stack_space = 0;
5189 #ifdef MAYBE_REG_PARM_STACK_SPACE
5190 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5191 #else
5192 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5193 #endif
5194 if (reg_parm_stack_space > 0)
5196 if (initial_offset_ptr->var)
5198 initial_offset_ptr->var
5199 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5200 ssize_int (reg_parm_stack_space));
5201 initial_offset_ptr->constant = 0;
5203 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5204 initial_offset_ptr->constant = reg_parm_stack_space;
5207 #endif /* REG_PARM_STACK_SPACE */
5209 arg_size_ptr->var = 0;
5210 arg_size_ptr->constant = 0;
5211 alignment_pad->var = 0;
5212 alignment_pad->constant = 0;
5214 #ifdef ARGS_GROW_DOWNWARD
5215 if (initial_offset_ptr->var)
5217 offset_ptr->constant = 0;
5218 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5219 initial_offset_ptr->var);
5221 else
5223 offset_ptr->constant = -initial_offset_ptr->constant;
5224 offset_ptr->var = 0;
5226 if (where_pad != none
5227 && (!host_integerp (sizetree, 1)
5228 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5229 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5230 SUB_PARM_SIZE (*offset_ptr, sizetree);
5231 if (where_pad != downward)
5232 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5233 if (initial_offset_ptr->var)
5234 arg_size_ptr->var = size_binop (MINUS_EXPR,
5235 size_binop (MINUS_EXPR,
5236 ssize_int (0),
5237 initial_offset_ptr->var),
5238 offset_ptr->var);
5240 else
5241 arg_size_ptr->constant = (-initial_offset_ptr->constant
5242 - offset_ptr->constant);
5244 #else /* !ARGS_GROW_DOWNWARD */
5245 if (!in_regs
5246 #ifdef REG_PARM_STACK_SPACE
5247 || REG_PARM_STACK_SPACE (fndecl) > 0
5248 #endif
5250 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5251 *offset_ptr = *initial_offset_ptr;
5253 #ifdef PUSH_ROUNDING
5254 if (passed_mode != BLKmode)
5255 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5256 #endif
5258 /* Pad_below needs the pre-rounded size to know how much to pad below
5259 so this must be done before rounding up. */
5260 if (where_pad == downward
5261 /* However, BLKmode args passed in regs have their padding done elsewhere.
5262 The stack slot must be able to hold the entire register. */
5263 && !(in_regs && passed_mode == BLKmode))
5264 pad_below (offset_ptr, passed_mode, sizetree);
5266 if (where_pad != none
5267 && (!host_integerp (sizetree, 1)
5268 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5269 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5271 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5272 #endif /* ARGS_GROW_DOWNWARD */
5275 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5276 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
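/* For example, with BOUNDARY of 64 bits (8 bytes) and a constant
   offset of 20, the offset becomes CEIL_ROUND (20, 8) == 24 (or
   FLOOR_ROUND (20, 8) == 16 if ARGS_GROW_DOWNWARD); *ALIGNMENT_PAD
   records the adjustment only when BOUNDARY exceeds both PARM_BOUNDARY
   and STACK_BOUNDARY.  */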
5278 static void
5279 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5280 struct args_size *offset_ptr;
5281 int boundary;
5282 struct args_size *alignment_pad;
5284 tree save_var = NULL_TREE;
5285 HOST_WIDE_INT save_constant = 0;
5287 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5289 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5291 save_var = offset_ptr->var;
5292 save_constant = offset_ptr->constant;
5295 alignment_pad->var = NULL_TREE;
5296 alignment_pad->constant = 0;
5298 if (boundary > BITS_PER_UNIT)
5300 if (offset_ptr->var)
5302 offset_ptr->var =
5303 #ifdef ARGS_GROW_DOWNWARD
5304 round_down
5305 #else
5306 round_up
5307 #endif
5308 (ARGS_SIZE_TREE (*offset_ptr),
5309 boundary / BITS_PER_UNIT);
5310 offset_ptr->constant = 0; /*?*/
5311 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5312 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5313 save_var);
5315 else
5317 offset_ptr->constant =
5318 #ifdef ARGS_GROW_DOWNWARD
5319 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5320 #else
5321 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5322 #endif
5323 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5324 alignment_pad->constant = offset_ptr->constant - save_constant;
5329 #ifndef ARGS_GROW_DOWNWARD
5330 static void
5331 pad_below (offset_ptr, passed_mode, sizetree)
5332 struct args_size *offset_ptr;
5333 enum machine_mode passed_mode;
5334 tree sizetree;
5336 if (passed_mode != BLKmode)
5338 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5339 offset_ptr->constant
5340 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5341 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5342 - GET_MODE_SIZE (passed_mode));
5344 else
5346 if (TREE_CODE (sizetree) != INTEGER_CST
5347 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5349 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5350 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5351 /* Add it in. */
5352 ADD_PARM_SIZE (*offset_ptr, s2);
5353 SUB_PARM_SIZE (*offset_ptr, sizetree);
5357 #endif
5359 /* Walk the tree of blocks describing the binding levels within a function
5360 and warn about uninitialized variables.
5361 This is done after calling flow_analysis and before global_alloc
5362 clobbers the pseudo-regs to hard regs. */
5364 void
5365 uninitialized_vars_warning (block)
5366 tree block;
5368 tree decl, sub;
5369 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5371 if (warn_uninitialized
5372 && TREE_CODE (decl) == VAR_DECL
5373 /* These warnings are unreliable for aggregates
5374 because assigning the fields one by one can fail to convince
5375 flow.c that the entire aggregate was initialized.
5376 Unions are troublesome because members may be shorter. */
5377 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5378 && DECL_RTL (decl) != 0
5379 && GET_CODE (DECL_RTL (decl)) == REG
5380 /* Global optimizations can make it difficult to determine if a
5381 particular variable has been initialized. However, a VAR_DECL
5382 with a nonzero DECL_INITIAL had an initializer, so do not
5383 claim it is potentially uninitialized.
5385 We do not care about the actual value in DECL_INITIAL, so we do
5386 not worry that it may be a dangling pointer. */
5387 && DECL_INITIAL (decl) == NULL_TREE
5388 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5389 warning_with_decl (decl,
5390 "`%s' might be used uninitialized in this function");
5391 if (extra_warnings
5392 && TREE_CODE (decl) == VAR_DECL
5393 && DECL_RTL (decl) != 0
5394 && GET_CODE (DECL_RTL (decl)) == REG
5395 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5396 warning_with_decl (decl,
5397 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5399 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5400 uninitialized_vars_warning (sub);
5403 /* Do the appropriate part of uninitialized_vars_warning
5404 but for arguments instead of local variables. */
5406 void
5407 setjmp_args_warning ()
5409 tree decl;
5410 for (decl = DECL_ARGUMENTS (current_function_decl);
5411 decl; decl = TREE_CHAIN (decl))
5412 if (DECL_RTL (decl) != 0
5413 && GET_CODE (DECL_RTL (decl)) == REG
5414 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5415 warning_with_decl (decl,
5416 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5419 /* If this function calls setjmp, put all vars into the stack
5420 unless they were declared `register'. */
5422 void
5423 setjmp_protect (block)
5424 tree block;
5426 tree decl, sub;
5427 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5428 if ((TREE_CODE (decl) == VAR_DECL
5429 || TREE_CODE (decl) == PARM_DECL)
5430 && DECL_RTL (decl) != 0
5431 && (GET_CODE (DECL_RTL (decl)) == REG
5432 || (GET_CODE (DECL_RTL (decl)) == MEM
5433 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5434 /* If this variable came from an inline function, it must be
5435 that its life doesn't overlap the setjmp. If there was a
5436 setjmp in the function, it would already be in memory. We
5437 must exclude such variables because their DECL_RTL might be
5438 set to strange things such as virtual_stack_vars_rtx. */
5439 && ! DECL_FROM_INLINE (decl)
5440 && (
5441 #ifdef NON_SAVING_SETJMP
5442 /* If longjmp doesn't restore the registers,
5443 don't put anything in them. */
5444 NON_SAVING_SETJMP
5446 #endif
5447 ! DECL_REGISTER (decl)))
5448 put_var_into_stack (decl);
5449 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5450 setjmp_protect (sub);
5453 /* Like the previous function, but for args instead of local variables. */
5455 void
5456 setjmp_protect_args ()
5458 tree decl;
5459 for (decl = DECL_ARGUMENTS (current_function_decl);
5460 decl; decl = TREE_CHAIN (decl))
5461 if ((TREE_CODE (decl) == VAR_DECL
5462 || TREE_CODE (decl) == PARM_DECL)
5463 && DECL_RTL (decl) != 0
5464 && (GET_CODE (DECL_RTL (decl)) == REG
5465 || (GET_CODE (DECL_RTL (decl)) == MEM
5466 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5467 && (
5468 /* If longjmp doesn't restore the registers,
5469 don't put anything in them. */
5470 #ifdef NON_SAVING_SETJMP
5471 NON_SAVING_SETJMP
5472 ||
5473 #endif
5474 ! DECL_REGISTER (decl)))
5475 put_var_into_stack (decl);
5478 /* Return the context-pointer register corresponding to DECL,
5479 or 0 if it does not need one. */
5481 rtx
5482 lookup_static_chain (decl)
5483 tree decl;
5485 tree context = decl_function_context (decl);
5486 tree link;
5488 if (context == 0
5489 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5490 return 0;
5492 /* We treat inline_function_decl as an alias for the current function
5493 because that is the inline function whose vars, types, etc.
5494 are being merged into the current function.
5495 See expand_inline_function. */
5496 if (context == current_function_decl || context == inline_function_decl)
5497 return virtual_stack_vars_rtx;
5499 for (link = context_display; link; link = TREE_CHAIN (link))
5500 if (TREE_PURPOSE (link) == context)
5501 return RTL_EXPR_RTL (TREE_VALUE (link));
5503 abort ();
5506 /* Convert a stack slot address ADDR for variable VAR
5507 (from a containing function)
5508 into an address valid in this function (using a static chain). */
5510 rtx
5511 fix_lexical_addr (addr, var)
5512 rtx addr;
5513 tree var;
5515 rtx basereg;
5516 HOST_WIDE_INT displacement;
5517 tree context = decl_function_context (var);
5518 struct function *fp;
5519 rtx base = 0;
5521 /* If this is the present function, we need not do anything. */
5522 if (context == current_function_decl || context == inline_function_decl)
5523 return addr;
5525 fp = find_function_data (context);
5527 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5528 addr = XEXP (XEXP (addr, 0), 0);
5530 /* Decode given address as base reg plus displacement. */
5531 if (GET_CODE (addr) == REG)
5532 basereg = addr, displacement = 0;
5533 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5534 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5535 else
5536 abort ();
5538 /* We accept vars reached via the containing function's
5539 incoming arg pointer and via its stack variables pointer. */
5540 if (basereg == fp->internal_arg_pointer)
5542 /* If reached via arg pointer, get the arg pointer value
5543 out of that function's stack frame.
5545 There are two cases: If a separate ap is needed, allocate a
5546 slot in the outer function for it and dereference it that way.
5547 This is correct even if the real ap is actually a pseudo.
5548 Otherwise, just adjust the offset from the frame pointer to
5549 compensate. */
5551 #ifdef NEED_SEPARATE_AP
5552 rtx addr;
5554 addr = get_arg_pointer_save_area (fp);
5555 addr = fix_lexical_addr (XEXP (addr, 0), var);
5556 addr = memory_address (Pmode, addr);
5558 base = gen_rtx_MEM (Pmode, addr);
5559 set_mem_alias_set (base, get_frame_alias_set ());
5560 base = copy_to_reg (base);
5561 #else
5562 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5563 base = lookup_static_chain (var);
5564 #endif
5567 else if (basereg == virtual_stack_vars_rtx)
5569 /* This is the same code as lookup_static_chain, duplicated here to
5570 avoid an extra call to decl_function_context. */
5571 tree link;
5573 for (link = context_display; link; link = TREE_CHAIN (link))
5574 if (TREE_PURPOSE (link) == context)
5576 base = RTL_EXPR_RTL (TREE_VALUE (link));
5577 break;
5581 if (base == 0)
5582 abort ();
5584 /* Use same offset, relative to appropriate static chain or argument
5585 pointer. */
5586 return plus_constant (base, displacement);
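
/* An illustration, not part of function.c: a minimal standalone sketch
   of the base-plus-displacement decoding used by fix_lexical_addr
   above, on a toy address type with hypothetical names (toy_addr,
   decode).  As in the code above, only a bare register or a
   register-plus-constant form is accepted; anything else aborts.  */

#include <stdlib.h>

enum toy_code { TOY_REG, TOY_PLUS };
struct toy_addr { enum toy_code code; int basereg; long offset; };

static void
decode (const struct toy_addr *addr, int *basereg, long *displacement)
{
  if (addr->code == TOY_REG)
    *basereg = addr->basereg, *displacement = 0;
  else if (addr->code == TOY_PLUS)
    *basereg = addr->basereg, *displacement = addr->offset;
  else
    abort ();	/* mirrors the abort for unrecognized address forms */
}

/* The caller then re-expresses the same displacement against a new
   base, as in the plus_constant (base, displacement) return above.  */
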
5589 /* Return the address of the trampoline for entering nested fn FUNCTION.
5590 If necessary, allocate a trampoline (in the stack frame)
5591 and emit rtl to initialize its contents (at entry to this function). */
5593 rtx
5594 trampoline_address (function)
5595 tree function;
5597 tree link;
5598 tree rtlexp;
5599 rtx tramp;
5600 struct function *fp;
5601 tree fn_context;
5603 /* Find an existing trampoline and return it. */
5604 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5605 if (TREE_PURPOSE (link) == function)
5606 return
5607 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5609 for (fp = outer_function_chain; fp; fp = fp->outer)
5610 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5611 if (TREE_PURPOSE (link) == function)
5613 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5614 function);
5615 return adjust_trampoline_addr (tramp);
5618 /* None exists; we must make one. */
5620 /* Find the `struct function' for the function containing FUNCTION. */
5621 fp = 0;
5622 fn_context = decl_function_context (function);
5623 if (fn_context != current_function_decl
5624 && fn_context != inline_function_decl)
5625 fp = find_function_data (fn_context);
5627 /* Allocate run-time space for this trampoline
5628 (usually in the defining function's stack frame). */
5629 #ifdef ALLOCATE_TRAMPOLINE
5630 tramp = ALLOCATE_TRAMPOLINE (fp);
5631 #else
5632 /* If rounding needed, allocate extra space
5633 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5634 #ifdef TRAMPOLINE_ALIGNMENT
5635 #define TRAMPOLINE_REAL_SIZE \
5636 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5637 #else
5638 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5639 #endif
5640 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5641 fp ? fp : cfun);
5642 #endif
5644 /* Record the trampoline for reuse and note it for later initialization
5645 by expand_function_end. */
5646 if (fp != 0)
5648 rtlexp = make_node (RTL_EXPR);
5649 RTL_EXPR_RTL (rtlexp) = tramp;
5650 fp->x_trampoline_list = tree_cons (function, rtlexp,
5651 fp->x_trampoline_list);
5653 else
5655 /* Make the RTL_EXPR node temporary, not momentary, so that the
5656 trampoline_list doesn't become garbage. */
5657 rtlexp = make_node (RTL_EXPR);
5659 RTL_EXPR_RTL (rtlexp) = tramp;
5660 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5663 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5664 return adjust_trampoline_addr (tramp);
5667 /* Given a trampoline address,
5668 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5670 static rtx
5671 round_trampoline_addr (tramp)
5672 rtx tramp;
5674 #ifdef TRAMPOLINE_ALIGNMENT
5675 /* Round address up to desired boundary. */
5676 rtx temp = gen_reg_rtx (Pmode);
5677 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5678 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5680 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5681 temp, 0, OPTAB_LIB_WIDEN);
5682 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5683 temp, 0, OPTAB_LIB_WIDEN);
5684 #endif
5685 return tramp;
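
/* An illustration, not part of function.c: the computation above is
   the standard power-of-two rounding idiom (x + align - 1) & -align.
   A standalone sketch on host integers with a hypothetical name;
   assumes the alignment is a nonzero power of two, as
   TRAMPOLINE_ALIGNMENT must be.  */

#include <assert.h>

static unsigned long
round_up_pow2 (unsigned long x, unsigned long align)
{
  assert (align != 0 && (align & (align - 1)) == 0);
  return (x + align - 1) & ~(align - 1);	/* same as & -align */
}

/* E.g. round_up_pow2 (0x1001, 16) == 0x1010; values that are already
   aligned come back unchanged.  */
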
5688 /* Given a trampoline address, round it, then apply any
5689 platform-specific adjustments so that the result can be used for a
5690 function call. */
5692 static rtx
5693 adjust_trampoline_addr (tramp)
5694 rtx tramp;
5696 tramp = round_trampoline_addr (tramp);
5697 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5698 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5699 #endif
5700 return tramp;
5703 /* Gather all this function's BLOCK nodes, including those chained
5704 onto the first block, into a vector in depth-first order, and
5705 store in each NOTE for the beginning or end of a block the BLOCK
5706 node it corresponds to.
5707 The function takes no arguments; it operates on the current
5708 function's block tree and insn chain. */
5710 void
5711 identify_blocks ()
5713 int n_blocks;
5714 tree *block_vector, *last_block_vector;
5715 tree *block_stack;
5716 tree block = DECL_INITIAL (current_function_decl);
5718 if (block == 0)
5719 return;
5721 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5722 depth-first order. */
5723 block_vector = get_block_vector (block, &n_blocks);
5724 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5726 last_block_vector = identify_blocks_1 (get_insns (),
5727 block_vector + 1,
5728 block_vector + n_blocks,
5729 block_stack);
5731 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5732 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5733 if (0 && last_block_vector != block_vector + n_blocks)
5734 abort ();
5736 free (block_vector);
5737 free (block_stack);
5740 /* Subroutine of identify_blocks. Do the block substitution on the
5741 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5743 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5744 BLOCK_VECTOR is incremented for each block seen. */
5746 static tree *
5747 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5748 rtx insns;
5749 tree *block_vector;
5750 tree *end_block_vector;
5751 tree *orig_block_stack;
5753 rtx insn;
5754 tree *block_stack = orig_block_stack;
5756 for (insn = insns; insn; insn = NEXT_INSN (insn))
5758 if (GET_CODE (insn) == NOTE)
5760 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5762 tree b;
5764 /* If there are more block notes than BLOCKs, something
5765 is badly wrong. */
5766 if (block_vector == end_block_vector)
5767 abort ();
5769 b = *block_vector++;
5770 NOTE_BLOCK (insn) = b;
5771 *block_stack++ = b;
5773 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5775 /* If there are more NOTE_INSN_BLOCK_ENDs than
5776 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5777 if (block_stack == orig_block_stack)
5778 abort ();
5780 NOTE_BLOCK (insn) = *--block_stack;
5783 else if (GET_CODE (insn) == CALL_INSN
5784 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5786 rtx cp = PATTERN (insn);
5788 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5789 end_block_vector, block_stack);
5790 if (XEXP (cp, 1))
5791 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5792 end_block_vector, block_stack);
5793 if (XEXP (cp, 2))
5794 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5795 end_block_vector, block_stack);
5799 /* If there are more NOTE_INSN_BLOCK_BEGs than NOTE_INSN_BLOCK_ENDs,
5800 something is badly wrong. */
5801 if (block_stack != orig_block_stack)
5802 abort ();
5804 return block_vector;
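
/* An illustration, not part of function.c: the BEG/END pairing in
   identify_blocks_1 is ordinary bracket matching with an explicit
   stack.  A standalone sketch with hypothetical names, treating '('
   as NOTE_INSN_BLOCK_BEG and ')' as NOTE_INSN_BLOCK_END; underflow
   or a nonzero final depth corresponds to the aborts above.  */

static int
block_notes_balanced (const char *s)
{
  int depth = 0;
  for (; *s; s++)
    {
      if (*s == '(')
	depth++;		/* push on BLOCK_BEG */
      else if (*s == ')' && depth-- == 0)
	return 0;		/* more ENDs than BEGs */
    }
  return depth == 0;		/* leftover BEGs are also an error */
}
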
5807 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5808 and create duplicate blocks. */
5809 /* ??? Need an option to either create block fragments or to create
5810 abstract origin duplicates of a source block. It really depends
5811 on what optimization has been performed. */
5813 void
5814 reorder_blocks ()
5816 tree block = DECL_INITIAL (current_function_decl);
5817 varray_type block_stack;
5819 if (block == NULL_TREE)
5820 return;
5822 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5824 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5825 reorder_blocks_0 (block);
5827 /* Prune the old trees away, so that they don't get in the way. */
5828 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5829 BLOCK_CHAIN (block) = NULL_TREE;
5831 /* Recreate the block tree from the note nesting. */
5832 reorder_blocks_1 (get_insns (), block, &block_stack);
5833 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5835 /* Remove deleted blocks from the block fragment chains. */
5836 reorder_fix_fragments (block);
5838 VARRAY_FREE (block_stack);
5841 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5843 static void
5844 reorder_blocks_0 (block)
5845 tree block;
5847 while (block)
5849 TREE_ASM_WRITTEN (block) = 0;
5850 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5851 block = BLOCK_CHAIN (block);
5855 static void
5856 reorder_blocks_1 (insns, current_block, p_block_stack)
5857 rtx insns;
5858 tree current_block;
5859 varray_type *p_block_stack;
5861 rtx insn;
5863 for (insn = insns; insn; insn = NEXT_INSN (insn))
5865 if (GET_CODE (insn) == NOTE)
5867 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5869 tree block = NOTE_BLOCK (insn);
5871 /* If we have seen this block before, that means it now
5872 spans multiple address regions. Create a new fragment. */
5873 if (TREE_ASM_WRITTEN (block))
5875 tree new_block = copy_node (block);
5876 tree origin;
5878 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5879 ? BLOCK_FRAGMENT_ORIGIN (block)
5880 : block);
5881 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5882 BLOCK_FRAGMENT_CHAIN (new_block)
5883 = BLOCK_FRAGMENT_CHAIN (origin);
5884 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5886 NOTE_BLOCK (insn) = new_block;
5887 block = new_block;
5890 BLOCK_SUBBLOCKS (block) = 0;
5891 TREE_ASM_WRITTEN (block) = 1;
5892 BLOCK_SUPERCONTEXT (block) = current_block;
5893 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5894 BLOCK_SUBBLOCKS (current_block) = block;
5895 current_block = block;
5896 VARRAY_PUSH_TREE (*p_block_stack, block);
5898 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5900 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5901 VARRAY_POP (*p_block_stack);
5902 BLOCK_SUBBLOCKS (current_block)
5903 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5904 current_block = BLOCK_SUPERCONTEXT (current_block);
5907 else if (GET_CODE (insn) == CALL_INSN
5908 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5910 rtx cp = PATTERN (insn);
5911 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5912 if (XEXP (cp, 1))
5913 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5914 if (XEXP (cp, 2))
5915 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
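
/* An illustration, not part of function.c: when a block is seen a
   second time, reorder_blocks_1 threads a fresh copy onto the
   origin's fragment chain at the head.  A minimal standalone sketch
   of that head insertion, with hypothetical names.  */

struct frag { struct frag *origin; struct frag *chain; };

static void
add_fragment (struct frag *origin, struct frag *copy)
{
  copy->origin = origin;	/* every fragment points at the origin */
  copy->chain = origin->chain;	/* splice in ahead of the old head */
  origin->chain = copy;
}
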
5920 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
5921 appears in the block tree, select one of the fragments to become
5922 the new origin block. */
5924 static void
5925 reorder_fix_fragments (block)
5926 tree block;
5928 while (block)
5930 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
5931 tree new_origin = NULL_TREE;
5933 if (dup_origin)
5935 if (! TREE_ASM_WRITTEN (dup_origin))
5937 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
5939 /* Find the first of the remaining fragments. There must
5940 be at least one -- the current block. */
5941 while (! TREE_ASM_WRITTEN (new_origin))
5942 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
5943 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
5946 else if (! dup_origin)
5947 new_origin = block;
5949 /* Re-root the rest of the fragments to the new origin. In the
5950 case that DUP_ORIGIN was null, that means BLOCK was the origin
5951 of a chain of fragments and we want to remove those fragments
5952 that didn't make it to the output. */
5953 if (new_origin)
5955 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
5956 tree chain = *pp;
5958 while (chain)
5960 if (TREE_ASM_WRITTEN (chain))
5962 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
5963 *pp = chain;
5964 pp = &BLOCK_FRAGMENT_CHAIN (chain);
5966 chain = BLOCK_FRAGMENT_CHAIN (chain);
5968 *pp = NULL_TREE;
5971 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
5972 block = BLOCK_CHAIN (block);
5976 /* Reverse the order of elements in the chain T of blocks,
5977 and return the new head of the chain (old last element). */
5979 static tree
5980 blocks_nreverse (t)
5981 tree t;
5983 tree prev = 0, decl, next;
5984 for (decl = t; decl; decl = next)
5986 next = BLOCK_CHAIN (decl);
5987 BLOCK_CHAIN (decl) = prev;
5988 prev = decl;
5990 return prev;
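
/* An illustration, not part of function.c: blocks_nreverse is the
   classic in-place reversal of a singly linked list.  The same loop
   on a toy node type, with hypothetical names.  */

struct toy_node { struct toy_node *next; };

static struct toy_node *
toy_nreverse (struct toy_node *t)
{
  struct toy_node *prev = 0, *p, *next;
  for (p = t; p; p = next)
    {
      next = p->next;	/* remember the rest of the chain */
      p->next = prev;	/* point this node at what came before */
      prev = p;
    }
  return prev;		/* the old tail is the new head */
}
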
5993 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5994 non-NULL, list them all into VECTOR, in a depth-first preorder
5995 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5996 blocks. */
5998 static int
5999 all_blocks (block, vector)
6000 tree block;
6001 tree *vector;
6003 int n_blocks = 0;
6005 while (block)
6007 TREE_ASM_WRITTEN (block) = 0;
6009 /* Record this block. */
6010 if (vector)
6011 vector[n_blocks] = block;
6013 ++n_blocks;
6015 /* Record the subblocks, and their subblocks... */
6016 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6017 vector ? vector + n_blocks : 0);
6018 block = BLOCK_CHAIN (block);
6021 return n_blocks;
6024 /* Return a vector containing all the blocks rooted at BLOCK. The
6025 number of elements in the vector is stored in N_BLOCKS_P. The
6026 vector is dynamically allocated; it is the caller's responsibility
6027 to call `free' on the pointer returned. */
6029 static tree *
6030 get_block_vector (block, n_blocks_p)
6031 tree block;
6032 int *n_blocks_p;
6034 tree *block_vector;
6036 *n_blocks_p = all_blocks (block, NULL);
6037 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6038 all_blocks (block, block_vector);
6040 return block_vector;
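
/* An illustration, not part of function.c: get_block_vector uses the
   common two-pass pattern -- traverse once with a null vector to
   count, allocate, then traverse again to fill.  A standalone sketch
   over a toy tree with hypothetical names; as above, the caller is
   responsible for freeing the result.  */

#include <stdlib.h>

struct toy_block { struct toy_block *sub, *chain; };

static int
toy_all_blocks (struct toy_block *b, struct toy_block **vec)
{
  int n = 0;
  for (; b; b = b->chain)
    {
      if (vec)
	vec[n] = b;
      ++n;
      n += toy_all_blocks (b->sub, vec ? vec + n : 0);
    }
  return n;
}

static struct toy_block **
toy_block_vector (struct toy_block *root, int *n_out)
{
  struct toy_block **vec;
  *n_out = toy_all_blocks (root, 0);			/* pass 1: count */
  vec = (struct toy_block **) malloc (*n_out * sizeof (*vec));
  toy_all_blocks (root, vec);				/* pass 2: fill */
  return vec;
}
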
6043 static int next_block_index = 2;
6045 /* Set BLOCK_NUMBER for all the blocks in FN. */
6047 void
6048 number_blocks (fn)
6049 tree fn;
6051 int i;
6052 int n_blocks;
6053 tree *block_vector;
6055 /* For SDB and XCOFF debugging output, we start numbering the blocks
6056 from 1 within each function, rather than keeping a running
6057 count. */
6058 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6059 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6060 next_block_index = 1;
6061 #endif
6063 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6065 /* The top-level BLOCK isn't numbered at all. */
6066 for (i = 1; i < n_blocks; ++i)
6067 /* We number the blocks from two. */
6068 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6070 free (block_vector);
6072 return;
6075 /* Allocate a function structure and reset its contents to the defaults. */
6077 static void
6078 prepare_function_start ()
6080 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6082 init_stmt_for_function ();
6083 init_eh_for_function ();
6085 cse_not_expected = ! optimize;
6087 /* Caller save not needed yet. */
6088 caller_save_needed = 0;
6090 /* No stack slots have been made yet. */
6091 stack_slot_list = 0;
6093 current_function_has_nonlocal_label = 0;
6094 current_function_has_nonlocal_goto = 0;
6096 /* There is no stack slot for handling nonlocal gotos. */
6097 nonlocal_goto_handler_slots = 0;
6098 nonlocal_goto_stack_level = 0;
6100 /* No labels have been declared for nonlocal use. */
6101 nonlocal_labels = 0;
6102 nonlocal_goto_handler_labels = 0;
6104 /* No function calls so far in this function. */
6105 function_call_count = 0;
6107 /* No parm regs have been allocated.
6108 (This is important for output_inline_function.) */
6109 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6111 /* Initialize the RTL mechanism. */
6112 init_emit ();
6114 /* Initialize the queue of pending postincrements and postdecrements,
6115 and some other info in expr.c. */
6116 init_expr ();
6118 /* We haven't done register allocation yet. */
6119 reg_renumber = 0;
6121 init_varasm_status (cfun);
6123 /* Clear out data used for inlining. */
6124 cfun->inlinable = 0;
6125 cfun->original_decl_initial = 0;
6126 cfun->original_arg_vector = 0;
6128 cfun->stack_alignment_needed = STACK_BOUNDARY;
6129 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6131 /* Set if a call to setjmp is seen. */
6132 current_function_calls_setjmp = 0;
6134 /* Set if a call to longjmp is seen. */
6135 current_function_calls_longjmp = 0;
6137 current_function_calls_alloca = 0;
6138 current_function_contains_functions = 0;
6139 current_function_is_leaf = 0;
6140 current_function_nothrow = 0;
6141 current_function_sp_is_unchanging = 0;
6142 current_function_uses_only_leaf_regs = 0;
6143 current_function_has_computed_jump = 0;
6144 current_function_is_thunk = 0;
6146 current_function_returns_pcc_struct = 0;
6147 current_function_returns_struct = 0;
6148 current_function_epilogue_delay_list = 0;
6149 current_function_uses_const_pool = 0;
6150 current_function_uses_pic_offset_table = 0;
6151 current_function_cannot_inline = 0;
6153 /* We have not yet needed to make a label to jump to for tail-recursion. */
6154 tail_recursion_label = 0;
6156 /* We haven't had a need to make a save area for ap yet. */
6157 arg_pointer_save_area = 0;
6159 /* No stack slots allocated yet. */
6160 frame_offset = 0;
6162 /* No SAVE_EXPRs in this function yet. */
6163 save_expr_regs = 0;
6165 /* No RTL_EXPRs in this function yet. */
6166 rtl_expr_chain = 0;
6168 /* Set up to allocate temporaries. */
6169 init_temp_slots ();
6171 /* Indicate that we need to distinguish between the return value of the
6172 present function and the return value of a function being called. */
6173 rtx_equal_function_value_matters = 1;
6175 /* Indicate that we have not instantiated virtual registers yet. */
6176 virtuals_instantiated = 0;
6178 /* Indicate that we want CONCATs now. */
6179 generating_concat_p = 1;
6181 /* Indicate we have no need of a frame pointer yet. */
6182 frame_pointer_needed = 0;
6184 /* By default assume not varargs or stdarg. */
6185 current_function_varargs = 0;
6186 current_function_stdarg = 0;
6188 /* We haven't made any trampolines for this function yet. */
6189 trampoline_list = 0;
6191 init_pending_stack_adjust ();
6192 inhibit_defer_pop = 0;
6194 current_function_outgoing_args_size = 0;
6196 if (init_lang_status)
6197 (*init_lang_status) (cfun);
6198 if (init_machine_status)
6199 (*init_machine_status) (cfun);
6202 /* Initialize the rtl expansion mechanism so that we can do simple things
6203 like generate sequences. This is used to provide a context during global
6204 initialization of some passes. */
6205 void
6206 init_dummy_function_start ()
6208 prepare_function_start ();
6211 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6212 and initialize static variables for generating RTL for the statements
6213 of the function. */
6215 void
6216 init_function_start (subr, filename, line)
6217 tree subr;
6218 const char *filename;
6219 int line;
6221 prepare_function_start ();
6223 current_function_name = (*decl_printable_name) (subr, 2);
6224 cfun->decl = subr;
6226 /* Nonzero if this is a nested function that uses a static chain. */
6228 current_function_needs_context
6229 = (decl_function_context (current_function_decl) != 0
6230 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6232 /* Within function body, compute a type's size as soon as it is laid out. */
6233 immediate_size_expand++;
6235 /* Prevent ever trying to delete the first instruction of a function.
6236 Also tell final how to output a linenum before the function prologue.
6237 Note linenums could be missing, e.g. when compiling a Java .class file. */
6238 if (line > 0)
6239 emit_line_note (filename, line);
6241 /* Make sure first insn is a note even if we don't want linenums.
6242 This makes sure the first insn will never be deleted.
6243 Also, final expects a note to appear there. */
6244 emit_note (NULL, NOTE_INSN_DELETED);
6246 /* Set flags used by final.c. */
6247 if (aggregate_value_p (DECL_RESULT (subr)))
6249 #ifdef PCC_STATIC_STRUCT_RETURN
6250 current_function_returns_pcc_struct = 1;
6251 #endif
6252 current_function_returns_struct = 1;
6255 /* Warn if this value is an aggregate type,
6256 regardless of which calling convention we are using for it. */
6257 if (warn_aggregate_return
6258 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6259 warning ("function returns an aggregate");
6261 current_function_returns_pointer
6262 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6265 /* Make sure all values used by the optimization passes have sane
6266 defaults. */
6267 void
6268 init_function_for_compilation ()
6270 reg_renumber = 0;
6272 /* No prologue/epilogue insns yet. */
6273 VARRAY_GROW (prologue, 0);
6274 VARRAY_GROW (epilogue, 0);
6275 VARRAY_GROW (sibcall_epilogue, 0);
6278 /* Indicate that the current function uses extra args
6279 not explicitly mentioned in the argument list in any fashion. */
6281 void
6282 mark_varargs ()
6284 current_function_varargs = 1;
6287 /* Expand a call to __main at the beginning of a possible main function. */
6289 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6290 #undef HAS_INIT_SECTION
6291 #define HAS_INIT_SECTION
6292 #endif
6294 void
6295 expand_main_function ()
6297 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6298 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6300 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6301 rtx tmp;
6303 /* Forcibly align the stack. */
6304 #ifdef STACK_GROWS_DOWNWARD
6305 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
6306 stack_pointer_rtx, 1, OPTAB_WIDEN);
6307 #else
6308 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6309 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6310 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6311 stack_pointer_rtx, 1, OPTAB_WIDEN);
6312 #endif
6313 if (tmp != stack_pointer_rtx)
6314 emit_move_insn (stack_pointer_rtx, tmp);
6316 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6317 tmp = force_reg (Pmode, const0_rtx);
6318 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6320 #endif
6322 #ifndef HAS_INIT_SECTION
6323 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6324 VOIDmode, 0);
6325 #endif
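
/* An illustration, not part of function.c: on a downward-growing
   stack, clearing the low bits of the stack pointer moves it toward
   lower addresses, so a single AND both aligns it and never exposes
   live data above it.  A standalone arithmetic sketch with a
   hypothetical name; the alignment is assumed to be a power of two.  */

static unsigned long
align_sp_down (unsigned long sp, unsigned long align)
{
  return sp & ~(align - 1);	/* sp & -align in two's complement */
}

/* E.g. align_sp_down (0x7fff1234, 16) == 0x7fff1230.  The
   upward-growing case above must instead add align - 1 and then
   mask, as in the two-insn sequence in expand_main_function.  */
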
6328 extern struct obstack permanent_obstack;
6330 /* The PENDING_SIZES represent the sizes of variable-sized types.
6331 Create RTL for the various sizes now (using temporary variables),
6332 so that we can refer to the sizes from the RTL we are generating
6333 for the current function. The PENDING_SIZES are a TREE_LIST. The
6334 TREE_VALUE of each node is a SAVE_EXPR. */
6336 void
6337 expand_pending_sizes (pending_sizes)
6338 tree pending_sizes;
6340 tree tem;
6342 /* Evaluate now the sizes of any types declared among the arguments. */
6343 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6345 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6346 EXPAND_MEMORY_USE_BAD);
6347 /* Flush the queue in case this parameter declaration has
6348 side-effects. */
6349 emit_queue ();
6353 /* Start the RTL for a new function, and set variables used for
6354 emitting RTL.
6355 SUBR is the FUNCTION_DECL node.
6356 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6357 the function's parameters, which must be run at any return statement. */
6359 void
6360 expand_function_start (subr, parms_have_cleanups)
6361 tree subr;
6362 int parms_have_cleanups;
6364 tree tem;
6365 rtx last_ptr = NULL_RTX;
6367 /* Make sure volatile mem refs aren't considered
6368 valid operands of arithmetic insns. */
6369 init_recog_no_volatile ();
6371 /* Set this before generating any memory accesses. */
6372 current_function_check_memory_usage
6373 = (flag_check_memory_usage
6374 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6376 current_function_instrument_entry_exit
6377 = (flag_instrument_function_entry_exit
6378 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6380 current_function_limit_stack
6381 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6383 /* If function gets a static chain arg, store it in the stack frame.
6384 Do this first, so it gets the first stack slot offset. */
6385 if (current_function_needs_context)
6387 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6389 /* Delay copying static chain if it is not a register to avoid
6390 conflicts with regs used for parameters. */
6391 if (! SMALL_REGISTER_CLASSES
6392 || GET_CODE (static_chain_incoming_rtx) == REG)
6393 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6396 /* If the parameters of this function need cleaning up, get a label
6397 for the beginning of the code which executes those cleanups. This must
6398 be done before doing anything with return_label. */
6399 if (parms_have_cleanups)
6400 cleanup_label = gen_label_rtx ();
6401 else
6402 cleanup_label = 0;
6404 /* Make the label for return statements to jump to. Do not special
6405 case machines with special return instructions -- they will be
6406 handled later during jump, ifcvt, or epilogue creation. */
6407 return_label = gen_label_rtx ();
6409 /* Initialize rtx used to return the value. */
6410 /* Do this before assign_parms so that we copy the struct value address
6411 before any library calls that assign parms might generate. */
6413 /* Decide whether to return the value in memory or in a register. */
6414 if (aggregate_value_p (DECL_RESULT (subr)))
6416 /* Returning something that won't go in a register. */
6417 rtx value_address = 0;
6419 #ifdef PCC_STATIC_STRUCT_RETURN
6420 if (current_function_returns_pcc_struct)
6422 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6423 value_address = assemble_static_space (size);
6425 else
6426 #endif
6428 /* Expect to be passed the address of a place to store the value.
6429 If it is passed as an argument, assign_parms will take care of
6430 it. */
6431 if (struct_value_incoming_rtx)
6433 value_address = gen_reg_rtx (Pmode);
6434 emit_move_insn (value_address, struct_value_incoming_rtx);
6437 if (value_address)
6439 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6440 set_mem_attributes (x, DECL_RESULT (subr), 1);
6441 SET_DECL_RTL (DECL_RESULT (subr), x);
6444 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6445 /* If return mode is void, this decl rtl should not be used. */
6446 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6447 else
6449 /* Compute the return value into a pseudo reg, which we will copy
6450 into the true return register after the cleanups are done. */
6452 /* In order to figure out what mode to use for the pseudo, we
6453 figure out what the mode of the eventual return register will
6454 actually be, and use that. */
6455 rtx hard_reg
6456 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6457 subr, 1);
6459 /* Structures that are returned in registers are not aggregate_value_p,
6460 so we may see a PARALLEL. Don't play pseudo games with this. */
6461 if (! REG_P (hard_reg))
6462 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6463 else
6465 /* Create the pseudo. */
6466 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6468 /* Needed because we may need to move this to memory
6469 in case it's a named return value whose address is taken. */
6470 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6474 /* Initialize rtx for parameters and local variables.
6475 In some cases this requires emitting insns. */
6477 assign_parms (subr);
6479 /* Copy the static chain now if it wasn't a register. The delay is to
6480 avoid conflicts with the parameter passing registers. */
6482 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6483 if (GET_CODE (static_chain_incoming_rtx) != REG)
6484 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6486 /* The following was moved from init_function_start.
6487 The move is supposed to make sdb output more accurate. */
6488 /* Indicate the beginning of the function body,
6489 as opposed to parm setup. */
6490 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6492 if (GET_CODE (get_last_insn ()) != NOTE)
6493 emit_note (NULL, NOTE_INSN_DELETED);
6494 parm_birth_insn = get_last_insn ();
6496 context_display = 0;
6497 if (current_function_needs_context)
6499 /* Fetch static chain values for containing functions. */
6500 tem = decl_function_context (current_function_decl);
6501 /* Copy the static chain pointer into a pseudo. If we have
6502 small register classes, copy the value from memory if
6503 static_chain_incoming_rtx is a REG. */
6504 if (tem)
6506 /* If the static chain originally came in a register, put it back
6507 there, then move it out in the next insn. The reason for
6508 this peculiar code is to satisfy function integration. */
6509 if (SMALL_REGISTER_CLASSES
6510 && GET_CODE (static_chain_incoming_rtx) == REG)
6511 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6512 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6515 while (tem)
6517 tree rtlexp = make_node (RTL_EXPR);
6519 RTL_EXPR_RTL (rtlexp) = last_ptr;
6520 context_display = tree_cons (tem, rtlexp, context_display);
6521 tem = decl_function_context (tem);
6522 if (tem == 0)
6523 break;
6524 /* Chain thru stack frames, assuming pointer to next lexical frame
6525 is found at the place we always store it. */
6526 #ifdef FRAME_GROWS_DOWNWARD
6527 last_ptr = plus_constant (last_ptr,
6528 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6529 #endif
6530 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6531 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6532 last_ptr = copy_to_reg (last_ptr);
6534 /* If we are not optimizing, ensure that we know that this
6535 piece of context is live over the entire function. */
6536 if (! optimize)
6537 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6538 save_expr_regs);
6542 if (current_function_instrument_entry_exit)
6544 rtx fun = DECL_RTL (current_function_decl);
6545 if (GET_CODE (fun) == MEM)
6546 fun = XEXP (fun, 0);
6547 else
6548 abort ();
6549 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6550 2, fun, Pmode,
6551 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6553 hard_frame_pointer_rtx),
6554 Pmode);
6557 #ifdef PROFILE_HOOK
6558 if (profile_flag)
6559 PROFILE_HOOK (profile_label_no);
6560 #endif
6562 /* After the display initializations is where the tail-recursion label
6563 should go, if we end up needing one. Ensure we have a NOTE here
6564 since some things (like trampolines) get placed before this. */
6565 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6567 /* Evaluate now the sizes of any types declared among the arguments. */
6568 expand_pending_sizes (nreverse (get_pending_sizes ()));
6570 /* Make sure there is a line number after the function entry setup code. */
6571 force_next_line_note ();
6574 /* Undo the effects of init_dummy_function_start. */
6575 void
6576 expand_dummy_function_end ()
6578 /* End any sequences that failed to be closed due to syntax errors. */
6579 while (in_sequence_p ())
6580 end_sequence ();
6582 /* Outside function body, can't compute type's actual size
6583 until next function's body starts. */
6585 free_after_parsing (cfun);
6586 free_after_compilation (cfun);
6587 cfun = 0;
6590 /* Call DOIT for each hard register used as a return value from
6591 the current function. */
6593 void
6594 diddle_return_value (doit, arg)
6595 void (*doit) PARAMS ((rtx, void *));
6596 void *arg;
6598 rtx outgoing = current_function_return_rtx;
6600 if (! outgoing)
6601 return;
6603 if (GET_CODE (outgoing) == REG)
6604 (*doit) (outgoing, arg);
6605 else if (GET_CODE (outgoing) == PARALLEL)
6607 int i;
6609 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6611 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6613 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6614 (*doit) (x, arg);
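
/* An illustration, not part of function.c: diddle_return_value is a
   small visitor -- callers pass a callback plus a closure pointer and
   never see whether the return value is a single REG or a PARALLEL.
   A standalone sketch of the same calling convention, with
   hypothetical names.  */

#include <stdio.h>

static void
visit_each (const int *vals, int n, void (*doit) (int, void *), void *arg)
{
  int i;
  for (i = 0; i < n; i++)
    (*doit) (vals[i], arg);
}

static void
print_val (int v, void *arg)
{
  (void) arg;			/* closure unused here */
  printf ("%d\n", v);
}

/* Usage: int regs[2] = { 0, 3 }; visit_each (regs, 2, print_val, 0);  */
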
6619 static void
6620 do_clobber_return_reg (reg, arg)
6621 rtx reg;
6622 void *arg ATTRIBUTE_UNUSED;
6624 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6627 void
6628 clobber_return_register ()
6630 diddle_return_value (do_clobber_return_reg, NULL);
6632 /* In case we do use pseudo to return value, clobber it too. */
6633 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6635 tree decl_result = DECL_RESULT (current_function_decl);
6636 rtx decl_rtl = DECL_RTL (decl_result);
6637 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6639 do_clobber_return_reg (decl_rtl, NULL);
6644 static void
6645 do_use_return_reg (reg, arg)
6646 rtx reg;
6647 void *arg ATTRIBUTE_UNUSED;
6649 emit_insn (gen_rtx_USE (VOIDmode, reg));
6652 void
6653 use_return_register ()
6655 diddle_return_value (do_use_return_reg, NULL);
6658 /* Generate RTL for the end of the current function.
6659 FILENAME and LINE are the current position in the source file.
6661 It is up to language-specific callers to do cleanups for parameters--
6662 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6664 void
6665 expand_function_end (filename, line, end_bindings)
6666 const char *filename;
6667 int line;
6668 int end_bindings;
6670 tree link;
6671 rtx clobber_after;
6673 #ifdef TRAMPOLINE_TEMPLATE
6674 static rtx initial_trampoline;
6675 #endif
6677 finish_expr_for_function ();
6679 /* If arg_pointer_save_area was referenced only from a nested
6680 function, we will not have initialized it yet. Do that now. */
6681 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6682 get_arg_pointer_save_area (cfun);
6684 #ifdef NON_SAVING_SETJMP
6685 /* Don't put any variables in registers if we call setjmp
6686 on a machine that fails to restore the registers. */
6687 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6689 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6690 setjmp_protect (DECL_INITIAL (current_function_decl));
6692 setjmp_protect_args ();
6694 #endif
6696 /* Initialize any trampolines required by this function. */
6697 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6699 tree function = TREE_PURPOSE (link);
6700 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6701 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6702 #ifdef TRAMPOLINE_TEMPLATE
6703 rtx blktramp;
6704 #endif
6705 rtx seq;
6707 #ifdef TRAMPOLINE_TEMPLATE
6708 /* First make sure this compilation has a template for
6709 initializing trampolines. */
6710 if (initial_trampoline == 0)
6712 initial_trampoline
6713 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6714 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6716 ggc_add_rtx_root (&initial_trampoline, 1);
6718 #endif
6720 /* Generate insns to initialize the trampoline. */
6721 start_sequence ();
6722 tramp = round_trampoline_addr (XEXP (tramp, 0));
6723 #ifdef TRAMPOLINE_TEMPLATE
6724 blktramp = replace_equiv_address (initial_trampoline, tramp);
6725 emit_block_move (blktramp, initial_trampoline,
6726 GEN_INT (TRAMPOLINE_SIZE));
6727 #endif
6728 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6729 seq = get_insns ();
6730 end_sequence ();
6732 /* Put those insns at entry to the containing function (this one). */
6733 emit_insns_before (seq, tail_recursion_reentry);
6736 /* If we are doing stack checking and this function makes calls,
6737 do a stack probe at the start of the function to ensure we have enough
6738 space for another stack frame. */
6739 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6741 rtx insn, seq;
6743 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6744 if (GET_CODE (insn) == CALL_INSN)
6746 start_sequence ();
6747 probe_stack_range (STACK_CHECK_PROTECT,
6748 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6749 seq = get_insns ();
6750 end_sequence ();
6751 emit_insns_before (seq, tail_recursion_reentry);
6752 break;
6756 /* Warn about unused parms if extra warnings were specified. */
6757 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6758 warning. WARN_UNUSED_PARAMETER is negative when set by
6759 -Wunused. */
6760 if (warn_unused_parameter > 0
6761 || (warn_unused_parameter < 0 && extra_warnings))
6763 tree decl;
6765 for (decl = DECL_ARGUMENTS (current_function_decl);
6766 decl; decl = TREE_CHAIN (decl))
6767 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6768 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6769 warning_with_decl (decl, "unused parameter `%s'");
6772 /* Delete handlers for nonlocal gotos if nothing uses them. */
6773 if (nonlocal_goto_handler_slots != 0
6774 && ! current_function_has_nonlocal_label)
6775 delete_handlers ();
6777 /* End any sequences that failed to be closed due to syntax errors. */
6778 while (in_sequence_p ())
6779 end_sequence ();
6781 /* Outside function body, can't compute type's actual size
6782 until next function's body starts. */
6783 immediate_size_expand--;
6785 clear_pending_stack_adjust ();
6786 do_pending_stack_adjust ();
6788 /* Mark the end of the function body.
6789 If control reaches this insn, the function can drop through
6790 without returning a value. */
6791 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6793 /* Must mark the last line number note in the function, so that the test
6794 coverage code can avoid counting the last line twice. This just tells
6795 the code to ignore the immediately following line note, since there
6796 already exists a copy of this note somewhere above. This line number
6797 note is still needed for debugging though, so we can't delete it. */
6798 if (flag_test_coverage)
6799 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6801 /* Output a linenumber for the end of the function.
6802 SDB depends on this. */
6803 emit_line_note_force (filename, line);
6805 /* Before the return label (if any), clobber the return
6806 registers so that they are not propagated live to the rest of
6807 the function. This can only happen with functions that drop
6808 through; if there had been a return statement, there would
6809 have either been a return rtx, or a jump to the return label.
6811 We delay the actual code generation until after
6812 current_function_return_rtx is computed. */
6813 clobber_after = get_last_insn ();
6815 /* Output the label for the actual return from the function,
6816 if one is expected. This happens either because a function epilogue
6817 is used instead of a return instruction, or because a return was done
6818 with a goto in order to run local cleanups, or because of pcc-style
6819 structure returning. */
6820 if (return_label)
6821 emit_label (return_label);
6823 /* C++ uses this. */
6824 if (end_bindings)
6825 expand_end_bindings (0, 0, 0);
6827 if (current_function_instrument_entry_exit)
6829 rtx fun = DECL_RTL (current_function_decl);
6830 if (GET_CODE (fun) == MEM)
6831 fun = XEXP (fun, 0);
6832 else
6833 abort ();
6834 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6835 2, fun, Pmode,
6836 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6838 hard_frame_pointer_rtx),
6839 Pmode);
6842 /* Let except.c know where it should emit the call to unregister
6843 the function context for sjlj exceptions. */
6844 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6845 sjlj_emit_function_exit_after (get_last_insn ());
6847 /* If we had calls to alloca, and this machine needs
6848 an accurate stack pointer to exit the function,
6849 insert some code to save and restore the stack pointer. */
6850 #ifdef EXIT_IGNORE_STACK
6851 if (! EXIT_IGNORE_STACK)
6852 #endif
6853 if (current_function_calls_alloca)
6855 rtx tem = 0;
6857 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6858 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6861 /* If scalar return value was computed in a pseudo-reg, or was a named
6862 return value that got dumped to the stack, copy that to the hard
6863 return register. */
6864 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6866 tree decl_result = DECL_RESULT (current_function_decl);
6867 rtx decl_rtl = DECL_RTL (decl_result);
6869 if (REG_P (decl_rtl)
6870 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6871 : DECL_REGISTER (decl_result))
6873 rtx real_decl_rtl;
6875 #ifdef FUNCTION_OUTGOING_VALUE
6876 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
6877 current_function_decl);
6878 #else
6879 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
6880 current_function_decl);
6881 #endif
6882 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
6884 /* If this is a BLKmode structure being returned in registers,
6885 then use the mode computed in expand_return. Note that if
6886 decl_rtl is memory, then its mode may have been changed,
6887 but that current_function_return_rtx has not. */
6888 if (GET_MODE (real_decl_rtl) == BLKmode)
6889 PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx));
6891 /* If a named return value dumped decl_rtl to memory, then
6892 we may need to re-do the PROMOTE_MODE signed/unsigned
6893 extension. */
6894 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6896 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6898 #ifdef PROMOTE_FUNCTION_RETURN
6899 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6900 &unsignedp, 1);
6901 #endif
6903 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6905 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6906 emit_group_load (real_decl_rtl, decl_rtl,
6907 int_size_in_bytes (TREE_TYPE (decl_result)));
6908 else
6909 emit_move_insn (real_decl_rtl, decl_rtl);
6911 /* The delay slot scheduler assumes that current_function_return_rtx
6912 holds the hard register containing the return value, not a
6913 temporary pseudo. */
6914 current_function_return_rtx = real_decl_rtl;
6918 /* If returning a structure, arrange to return the address of the value
6919 in a place where debuggers expect to find it.
6921 If returning a structure PCC style,
6922 the caller also depends on this value.
6923 And current_function_returns_pcc_struct is not necessarily set. */
6924 if (current_function_returns_struct
6925 || current_function_returns_pcc_struct)
6927 rtx value_address
6928 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6929 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6930 #ifdef FUNCTION_OUTGOING_VALUE
6931 rtx outgoing
6932 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6933 current_function_decl);
6934 #else
6935 rtx outgoing
6936 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
6937 #endif
6939 /* Mark this as a function return value so integrate will delete the
6940 assignment and USE below when inlining this function. */
6941 REG_FUNCTION_VALUE_P (outgoing) = 1;
6943 #ifdef POINTERS_EXTEND_UNSIGNED
6944 /* The address may be ptr_mode and OUTGOING may be Pmode. */
6945 if (GET_MODE (outgoing) != GET_MODE (value_address))
6946 value_address = convert_memory_address (GET_MODE (outgoing),
6947 value_address);
6948 #endif
6950 emit_move_insn (outgoing, value_address);
6952 /* Show return register used to hold result (in this case the address
6953 of the result). */
6954 current_function_return_rtx = outgoing;
6957 /* If this is an implementation of throw, do what's necessary to
6958 communicate between __builtin_eh_return and the epilogue. */
6959 expand_eh_return ();
6961 /* Emit the actual code to clobber return register. */
6963 rtx seq, after;
6965 start_sequence ();
6966 clobber_return_register ();
6967 seq = gen_sequence ();
6968 end_sequence ();
6970 after = emit_insn_after (seq, clobber_after);
6972 if (clobber_after != after)
6973 cfun->x_clobber_return_insn = after;
6976 /* ??? This should no longer be necessary since stupid is no longer with
6977 us, but there are some parts of the compiler (eg reload_combine, and
6978 sh mach_dep_reorg) that still try and compute their own lifetime info
6979 instead of using the general framework. */
6980 use_return_register ();
6982 /* Fix up any gotos that jumped out to the outermost
6983 binding level of the function.
6984 Must follow emitting RETURN_LABEL. */
6986 /* If you have any cleanups to do at this point,
6987 and they need to create temporary variables,
6988 then you will lose. */
6989 expand_fixups (get_insns ());
6992 rtx
6993 get_arg_pointer_save_area (f)
6994 struct function *f;
6996 rtx ret = f->x_arg_pointer_save_area;
6998 if (! ret)
7000 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7001 f->x_arg_pointer_save_area = ret;
7004 if (f == cfun && ! f->arg_pointer_save_area_init)
7006 rtx seq;
7008 /* Save the arg pointer at the beginning of the function. The
7009 generated stack slot may not be a valid memory address, so we
7010 have to check it and fix it if necessary. */
7011 start_sequence ();
7012 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7013 seq = gen_sequence ();
7014 end_sequence ();
7016 push_topmost_sequence ();
7017 emit_insn_after (seq, get_insns ());
7018 pop_topmost_sequence ();
7021 return ret;
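
/* An illustration, not part of function.c: get_arg_pointer_save_area
   is lazy initialization -- the slot is created on first request,
   cached in the function structure, and the one-time setup code is
   emitted only then.  A minimal standalone sketch of the
   allocate-once idiom, with hypothetical names.  */

#include <stdlib.h>

static double *
get_scratch (void)
{
  static double *buf;		/* null until the first call */
  if (! buf)
    buf = (double *) malloc (256 * sizeof (double));
  return buf;			/* later calls return the same buffer */
}
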
7024 /* Extend a vector that records the INSN_UIDs of INSNS (either a
7025 sequence or a single insn). */
7027 static void
7028 record_insns (insns, vecp)
7029 rtx insns;
7030 varray_type *vecp;
7032 if (GET_CODE (insns) == SEQUENCE)
7034 int len = XVECLEN (insns, 0);
7035 int i = VARRAY_SIZE (*vecp);
7037 VARRAY_GROW (*vecp, i + len);
7038 while (--len >= 0)
7040 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
7041 ++i;
7044 else
7046 int i = VARRAY_SIZE (*vecp);
7047 VARRAY_GROW (*vecp, i + 1);
7048 VARRAY_INT (*vecp, i) = INSN_UID (insns);
7052 /* Determine how many INSN_UIDs in VEC are part of INSN. */
7054 static int
7055 contains (insn, vec)
7056 rtx insn;
7057 varray_type vec;
7059 int i, j;
7061 if (GET_CODE (insn) == INSN
7062 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7064 int count = 0;
7065 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7066 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7067 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7068 count++;
7069 return count;
7071 else
7073 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7074 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7075 return 1;
7077 return 0;
7080 int
7081 prologue_epilogue_contains (insn)
7082 rtx insn;
7084 if (contains (insn, prologue))
7085 return 1;
7086 if (contains (insn, epilogue))
7087 return 1;
7088 return 0;
7091 int
7092 sibcall_epilogue_contains (insn)
7093 rtx insn;
7095 if (sibcall_epilogue)
7096 return contains (insn, sibcall_epilogue);
7097 return 0;
7100 #ifdef HAVE_return
7101 /* Insert gen_return at the end of block BB. This also means updating
7102 block_for_insn appropriately. */
7104 static void
7105 emit_return_into_block (bb, line_note)
7106 basic_block bb;
7107 rtx line_note;
7109 rtx p, end;
7111 p = NEXT_INSN (bb->end);
7112 end = emit_jump_insn_after (gen_return (), bb->end);
7113 if (line_note)
7114 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7115 NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7117 #endif /* HAVE_return */
7119 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7121 /* These functions convert the epilogue into a variant that does not modify the
7122 stack pointer. This is used in cases where a function returns an object
7123 whose size is not known until it is computed. The called function leaves the
7124 object on the stack, leaves the stack depressed, and returns a pointer to
7125 the object.
7127 What we need to do is track all modifications and references to the stack
7128 pointer, deleting the modifications and changing the references to point to
7129 the location the stack pointer would have pointed to had the modifications
7130 taken place.
7132 These functions need to be portable so we need to make as few assumptions
7133 about the epilogue as we can. However, the epilogue basically contains
7134 three things: instructions to reset the stack pointer, instructions to
7135 reload registers, possibly including the frame pointer, and an
7136 instruction to return to the caller.
7138 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7139 We also make no attempt to validate the insns we make since if they are
7140 invalid, we probably can't do anything valid. The intent is that these
7141 routines get "smarter" as more and more machines start to use them and
7142 they try operating on different epilogues.
7144 We use the following structure to track what the part of the epilogue that
7145 we've already processed has done. We keep two copies of the SP equivalence,
7146 one for use during the insn we are processing and one for use in the next
7147 insn. The difference is because one part of a PARALLEL may adjust SP
7148 and the other may use it. */
7150 struct epi_info
7152 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7153 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7154 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7155 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7156 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7157 should be set to once we no longer need
7158 its value. */
7161 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7162 static void emit_equiv_load PARAMS ((struct epi_info *));
7164 /* Modify SEQ, a SEQUENCE that is part of the epilogue, to make no modifications
7165 to the stack pointer. Return the new sequence. */
7167 static rtx
7168 keep_stack_depressed (seq)
7169 rtx seq;
7171 int i, j;
7172 struct epi_info info;
7174 /* If the epilogue is just a single instruction, it must be OK as is. */
7176 if (GET_CODE (seq) != SEQUENCE)
7177 return seq;
7179 /* Otherwise, start a sequence, initialize the information we have, and
7180 process all the insns we were given. */
7181 start_sequence ();
7183 info.sp_equiv_reg = stack_pointer_rtx;
7184 info.sp_offset = 0;
7185 info.equiv_reg_src = 0;
7187 for (i = 0; i < XVECLEN (seq, 0); i++)
7189 rtx insn = XVECEXP (seq, 0, i);
7191 if (!INSN_P (insn))
7193 add_insn (insn);
7194 continue;
7197 /* If this insn references the register that SP is equivalent to and
7198 we have a pending load to that register, we must force out the load
7199 first and then indicate we no longer know what SP's equivalent is. */
7200 if (info.equiv_reg_src != 0
7201 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7203 emit_equiv_load (&info);
7204 info.sp_equiv_reg = 0;
7207 info.new_sp_equiv_reg = info.sp_equiv_reg;
7208 info.new_sp_offset = info.sp_offset;
7210 /* If this is a (RETURN) and the return address is on the stack,
7211 update the address and change to an indirect jump. */
7212 if (GET_CODE (PATTERN (insn)) == RETURN
7213 || (GET_CODE (PATTERN (insn)) == PARALLEL
7214 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7216 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7217 rtx base = 0;
7218 HOST_WIDE_INT offset = 0;
7219 rtx jump_insn, jump_set;
7221 /* If the return address is in a register, we can emit the insn
7222 unchanged. Otherwise, it must be a MEM and we see what the
7223 base register and offset are. In any case, we have to emit any
7224 pending load to the equivalent reg of SP, if any. */
7225 if (GET_CODE (retaddr) == REG)
7227 emit_equiv_load (&info);
7228 add_insn (insn);
7229 continue;
7231 else if (GET_CODE (retaddr) == MEM
7232 && GET_CODE (XEXP (retaddr, 0)) == REG)
7233 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7234 else if (GET_CODE (retaddr) == MEM
7235 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7236 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7237 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7239 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7240 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7242 else
7243 abort ();
7245 /* If the base of the location containing the return pointer
7246 is SP, we must update it with the replacement address. Otherwise,
7247 just build the necessary MEM. */
7248 retaddr = plus_constant (base, offset);
7249 if (base == stack_pointer_rtx)
7250 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7251 plus_constant (info.sp_equiv_reg,
7252 info.sp_offset));
7254 retaddr = gen_rtx_MEM (Pmode, retaddr);
7256 /* If there is a pending load to the equivalent register for SP
7257 and we reference that register, we must load our address into
7258 a scratch register and then do that load. */
7259 if (info.equiv_reg_src
7260 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7262 unsigned int regno;
7263 rtx reg;
7265 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7266 if (HARD_REGNO_MODE_OK (regno, Pmode)
7267 && !fixed_regs[regno] && call_used_regs[regno]
7268 && !FUNCTION_VALUE_REGNO_P (regno))
7269 break;
7271 if (regno == FIRST_PSEUDO_REGISTER)
7272 abort ();
7274 reg = gen_rtx_REG (Pmode, regno);
7275 emit_move_insn (reg, retaddr);
7276 retaddr = reg;
7279 emit_equiv_load (&info);
7280 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7282 /* Show the SET in the above insn is a RETURN. */
7283 jump_set = single_set (jump_insn);
7284 if (jump_set == 0)
7285 abort ();
7286 else
7287 SET_IS_RETURN_P (jump_set) = 1;
7290 /* If SP is not mentioned in the pattern and its equivalent register, if
7291 any, is not modified, just emit it. Otherwise, if neither is set,
7292 replace the reference to SP and emit the insn. If none of those are
7293 true, handle each SET individually. */
7294 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7295 && (info.sp_equiv_reg == stack_pointer_rtx
7296 || !reg_set_p (info.sp_equiv_reg, insn)))
7297 add_insn (insn);
7298 else if (! reg_set_p (stack_pointer_rtx, insn)
7299 && (info.sp_equiv_reg == stack_pointer_rtx
7300 || !reg_set_p (info.sp_equiv_reg, insn)))
7302 if (! validate_replace_rtx (stack_pointer_rtx,
7303 plus_constant (info.sp_equiv_reg,
7304 info.sp_offset),
7305 insn))
7306 abort ();
7308 add_insn (insn);
7310 else if (GET_CODE (PATTERN (insn)) == SET)
7311 handle_epilogue_set (PATTERN (insn), &info);
7312 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7314 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7315 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7316 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7318 else
7319 add_insn (insn);
7321 info.sp_equiv_reg = info.new_sp_equiv_reg;
7322 info.sp_offset = info.new_sp_offset;
7325 seq = gen_sequence ();
7326 end_sequence ();
7327 return seq;
7330 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
7331 structure that contains information about what we've seen so far. We
7332 process this SET by either updating that data or by emitting one or
7333 more insns. */
7335 static void
7336 handle_epilogue_set (set, p)
7337 rtx set;
7338 struct epi_info *p;
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
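  /* For example, (set (reg sp) (plus (reg X) (const_int N))) records
     X as the new SP equivalent at offset N.  */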
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
	abort ();

      if (GET_CODE (SET_SRC (set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}

      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
	abort ();

      return;
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  */
  else if (p->sp_equiv_reg != 0 && reg_set_p (p->sp_equiv_reg, set))
    {
      if (!rtx_equal_p (p->sp_equiv_reg, SET_DEST (set))
	  || p->equiv_reg_src != 0)
	abort ();
      else
	p->equiv_reg_src
	  = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				  plus_constant (p->sp_equiv_reg,
						 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn with its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}

/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (p)
     struct epi_info *p;
{
  if (p->equiv_reg_src != 0)
    emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
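
  /* Whether or not a load was emitted above, the pending value has now
     been handled, so clear it.  */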
  p->equiv_reg_src = 0;
}
#endif

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined (HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      record_insns (seq, &prologue);
      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      seq = gen_sequence ();
      end_sequence ();

      /* We can't deal with multiple successors of the entry block at the
	 moment.  The function should always have at least one entry
	 point.  */
      if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
	abort ();

      insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction, then by
	 definition we don't need a full epilogue.  Examine the block that
	 falls through to EXIT.  If it does not contain any code, examine
	 its predecessors and try to emit (conditional) return
	 instructions.  */

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = last->end;
      while (label && GET_CODE (label) != CODE_LABEL)
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}
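
      /* If the scan above stopped at the block's CODE_LABEL without finding
	 an active insn, the block is empty, so each predecessor that jumps
	 to it can potentially return directly instead.  */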
      if (last->head == label && GET_CODE (label) == CODE_LABEL)
	{
	  rtx epilogue_line_note = NULL_RTX;

	  /* Locate the line number associated with the closing brace,
	     if we can find one.  */
	  for (seq = get_last_insn ();
	       seq && ! active_insn_p (seq);
	       seq = PREV_INSN (seq))
	    if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
	      {
		epilogue_line_note = seq;
		break;
	      }

	  for (e = last->pred; e; e = e_next)
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      e_next = e->pred_next;
	      if (bb == ENTRY_BLOCK_PTR)
		continue;

	      jump = bb->end;
	      if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
		continue;

	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb, epilogue_line_note);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  rtx ret, *loc;

		  ret = SET_SRC (PATTERN (jump));
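		  /* The insn pattern is (set (pc) (if_then_else COND A B));
		     point LOC at whichever arm is the LABEL_REF so that it
		     can be replaced with a RETURN.  */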
		  if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
		    loc = &XEXP (ret, 1);
		  else
		    loc = &XEXP (ret, 2);
		  ret = gen_rtx_RETURN (VOIDmode);

		  if (! validate_change (jump, loc, ret, 0))
		    continue;
		  if (JUMP_LABEL (jump))
		    LABEL_NUSES (JUMP_LABEL (jump))--;

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (bb->succ->succ_next == NULL)
		    continue;
		}
	      else
		continue;

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (last->end);
	  emit_return_into_block (last, epilogue_line_note);
	  epilogue_end = last->end;
	  last->succ->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
	 due to RETURN instructions, but those don't need epilogues.
	 There really shouldn't be a mixture -- either all should have
	 been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;

      start_sequence ();
      epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
	 it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
	seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      record_insns (seq, &epilogue);

      seq = gen_sequence ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
#endif
 epilogue_done:

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = bb->end;
      rtx i;
      rtx newinsn;

      if (GET_CODE (insn) != CALL_INSN
	  || ! SIBLING_CALL_P (insn))
	continue;

      start_sequence ();
      seq = gen_sibcall_epilogue ();
      end_sequence ();

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  */
      record_insns (GET_CODE (seq) == SEQUENCE
		    ? seq : newinsn, &sibcall_epilogue);
    }
#endif

#ifdef HAVE_prologue
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note after the prologue.  Which means (1) that if
	 there are line number notes before where we inserted the
	 prologue we should move them, and (2) we should generate a
	 note before the end of the first basic block, if there isn't
	 one already there.

	 ??? This behaviour is completely broken when dealing with
	 multiple entry functions.  We simply always place the note
	 into the first basic block, letting alternate entry points
	 be missed.  */

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  */
	      if (prev == NULL)
		break;
	      reorder_insns (insn, insn, prologue_end);
	    }
	}

      /* Find the last line number note in the first block.  */
      for (insn = BASIC_BLOCK (0)->end;
	   insn != prologue_end && insn;
	   insn = PREV_INSN (insn))
	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	  break;

      /* If we didn't find one, make a copy of the first line number
	 we run across.  */
      if (! insn)
	{
	  for (insn = next_active_insn (prologue_end);
	       insn;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	      {
		emit_line_note_after (NOTE_SOURCE_FILE (insn),
				      NOTE_LINE_NUMBER (insn),
				      prologue_end);
		break;
	      }
	}
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      rtx insn, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = f; len && insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
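
	  /* contains () returns how many of the insns within INSN (which
	     may be a SEQUENCE) were recorded as prologue insns, so when
	     LEN reaches zero, INSN is (or contains) the last prologue
	     insn.  */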
	  else if ((len -= contains (insn, prologue)) == 0)
	    {
	      rtx next;

	      /* Find the prologue-end note if we haven't already, and
		 move it to just after the last prologue insn.  */
	      if (note == 0)
		{
		  for (note = insn; (note = NEXT_INSN (note));)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		      break;
		}

	      next = NEXT_INSN (note);

	      /* Whether or not we can depend on BLOCK_HEAD,
		 attempt to keep it up-to-date.  */
	      if (BLOCK_HEAD (0) == note)
		BLOCK_HEAD (0) = next;

	      remove_insn (note);
	      /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	      if (GET_CODE (insn) == CODE_LABEL)
		insn = NEXT_INSN (insn);
	      add_insn_after (note, insn);
	    }
	}
    }

  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      rtx insn, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if ((len -= contains (insn, epilogue)) == 0)
	    {
	      /* Find the epilogue-begin note if we haven't already, and
		 move it to just before the first epilogue insn.  */
	      if (note == 0)
		{
		  for (note = insn; (note = PREV_INSN (note));)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		      break;
		}

	      /* Whether or not we can depend on BLOCK_HEAD,
		 attempt to keep it up-to-date.  */
	      if (n_basic_blocks
		  && BLOCK_HEAD (n_basic_blocks - 1) == insn)
		BLOCK_HEAD (n_basic_blocks - 1) = note;

	      remove_insn (note);
	      add_insn_before (note, insn);
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

/* Mark the rtx and tree fields of the struct function P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  struct var_refs_queue *q;
  struct temp_slot *t;
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
	 i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);
  ggc_mark_rtx (p->x_clobber_return_insn);
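
  /* Mark each temp slot together with the rtl and trees it refers to.  */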
  for (t = p->x_temp_slots; t != 0; t = t->next)
    {
      ggc_mark (t);
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);
      ggc_mark_tree (t->type);
    }

  for (q = p->fixup_var_refs_queue; q != 0; q = q->next)
    {
      ggc_mark (q);
      ggc_mark_rtx (q->modified);
    }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);

  mark_hard_reg_initial_vals (p);
}

/* Mark the struct function pointed to by *ARG for GC, if it is not
   NULL.  This is used to mark the current function and the outer
   function chain.  */

static void
maybe_mark_struct_function (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  if (f == 0)
    return;

  ggc_mark_struct_function (f);
}

/* Mark a struct function * for GC.  This is called from ggc-common.c.  */

void
ggc_mark_struct_function (f)
     struct function *f;
{
  ggc_mark (f);
  ggc_mark_tree (f->decl);

  mark_function_status (f);
  mark_eh_status (f->eh);
  mark_stmt_status (f->stmt);
  mark_expr_status (f->expr);
  mark_emit_status (f->emit);
  mark_varasm_status (f->varasm);

  if (mark_machine_status)
    (*mark_machine_status) (f);
  if (mark_lang_status)
    (*mark_lang_status) (f);

  if (f->original_arg_vector)
    ggc_mark_rtvec ((rtvec) f->original_arg_vector);
  if (f->original_decl_initial)
    ggc_mark_tree (f->original_decl_initial);
  if (f->outer)
    ggc_mark_struct_function (f->outer);
}

/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&cfun, 1, sizeof cfun, maybe_mark_struct_function);
  ggc_add_root (&outer_function_chain, 1, sizeof outer_function_chain,
		maybe_mark_struct_function);
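
  /* These varrays record the INSN_UIDs of the prologue, epilogue, and
     sibcall-epilogue insns emitted for each function; contains () uses
     them to identify those insns later.  */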
  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}