Remove some compile time warnings about duplicate definitions.
[official-gcc.git] / gcc / function.c
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
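
/* An illustration (not part of the original source): for a power-of-two
   ALIGN these are simple mask operations, e.g.

     FLOOR_ROUND (13, 8)  ==  8    (13 & ~7)
     CEIL_ROUND (13, 8)   == 16    ((13 + 7) & ~7)
     FLOOR_ROUND (-13, 8) == -16   (two's complement masking also
                                    rounds negative offsets down)  */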
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */

int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));
/* This variable holds a pointer to a function to register any
   data items in the target specific, per-function data structure
   that will need garbage collection.  */
void (*mark_machine_status) PARAMS ((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PARAMS ((struct function *));
void (*save_lang_status) PARAMS ((struct function *));
void (*restore_lang_status) PARAMS ((struct function *));
void (*mark_lang_status) PARAMS ((struct function *));
void (*free_lang_status) PARAMS ((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static varray_type prologue;
static varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
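
/* An illustration (not part of the original source): a temporary
   allocated while expanding a statement at level N is freed by the
   free_temp_slots call for that statement.  If the statement computes
   the value of a ({...}) grouping, preserve_temp_slots instead marks
   the slot as level N-1, so it survives until the enclosing level is
   popped.  */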
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
                                         int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
                                        enum machine_mode, enum machine_mode,
                                        int, unsigned int, int,
                                        struct hash_table *));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
                                             enum machine_mode,
                                             struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
                                    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
                                          int, int));
static void fixup_var_refs_insns_with_hash
  PARAMS ((struct hash_table *, rtx,
           enum machine_mode, int));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
                                         int, int));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
                                      struct fixup_replacement **));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
                                          struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
                               tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
                                       struct hash_table *));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
                                                         struct hash_table *,
                                                         hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_function_status PARAMS ((struct function *));
static void maybe_mark_struct_function PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));

/* Pointer to chain of `struct function' for containing functions.  */
static struct function *outer_function_chain;
/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  if (save_lang_status)
    (*save_lang_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
                    queue->unsignedp, 0);

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with the expected alignment of the
     stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
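
/* Usage sketch (an illustration, not part of the original source):
   allocating a word-sized slot with the default alignment for its mode

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   yields a MEM whose address is based on virtual_stack_vars_rtx, or on
   the frame pointer once virtual registers have been instantiated.  */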
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_VOLATILE_P (p->slot) = 0;

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
  set_mem_align (p->slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
      MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
    }

  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  enum machine_mode mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
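
/* Usage sketch (an illustration, not part of the original source):

     rtx t = assign_temp (type, 1, 0, 0);

   requests a temporary for TYPE with KEEP == 1 (survives one call to
   free_temp_slots), MEMORY_REQUIRED == 0 (a pseudo register may be
   returned when TYPE's mode is not BLKmode), and DONT_PROMOTE == 0
   (the register may be promoted to a wider mode).  */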
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless we are
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on the other operands.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
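
/* Usage sketch (an illustration, not part of the original source):
   a caller typically brackets the expansion of one expression like

     push_temp_slots ();
     result = expand_expr (exp, target, mode, modifier);
     preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();

   so that temporaries die with the statement unless the result itself
   lives in one.  */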
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
               || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
         ? SAVE_EXPR_RTL (decl)
         : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
                            decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fixup references to the parts only after we fixup references
         to the whole CONCAT, lest we do double fixups for the latter
         references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = type_for_mode (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating
         already-computed alias sets.  Here we want to re-generate them.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, VOIDmode,
                       3, XEXP (reg, 0), Pmode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     struct hash_table *ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
          end_sequence ();
        }
    }
}
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the Normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }
      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel);

      insn = next;
    }
}
/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp)
     struct hash_table *ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  struct insns_for_mem_entry *ime = (struct insns_for_mem_entry *)
    hash_lookup (ht, var, /*create=*/0, /*copy=*/0);
  rtx insn_list = ime->insns;

  while (insn_list)
    {
      rtx insn = XEXP (insn_list, 0);

      if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, 1);

      insn_list = XEXP (insn_list, 1);
    }
}
1685 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1686 the insn under examination, VAR is the variable to fix up
1687 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1688 TOPLEVEL is nonzero if this is the main insn chain for this
1689 function. */
1691 static void
1692 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel)
1693 rtx insn;
1694 rtx var;
1695 enum machine_mode promoted_mode;
1696 int unsignedp;
1697 int toplevel;
1699 rtx call_dest = 0;
1700 rtx set, prev, prev_set;
1701 rtx note;
1703 /* Remember the notes in case we delete the insn. */
1704 note = REG_NOTES (insn);
1706 /* If this is a CLOBBER of VAR, delete it.
1708 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1709 and REG_RETVAL notes too. */
1710 if (GET_CODE (PATTERN (insn)) == CLOBBER
1711 && (XEXP (PATTERN (insn), 0) == var
1712 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1713 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1714 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1716 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1717 /* The REG_LIBCALL note will go away since we are going to
1718 turn INSN into a NOTE, so just delete the
1719 corresponding REG_RETVAL note. */
1720 remove_note (XEXP (note, 0),
1721 find_reg_note (XEXP (note, 0), REG_RETVAL,
1722 NULL_RTX));
1724 delete_insn (insn);
1727 /* The insn to load VAR from a home in the arglist
1728 is now a no-op. When we see it, just delete it.
1729 Similarly if this is storing VAR from a register from which
1730 it was loaded in the previous insn. This will occur
1731 when an ADDRESSOF was made for an arglist slot. */
1732 else if (toplevel
1733 && (set = single_set (insn)) != 0
1734 && SET_DEST (set) == var
1735 /* If this represents the result of an insn group,
1736 don't delete the insn. */
1737 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1738 && (rtx_equal_p (SET_SRC (set), var)
1739 || (GET_CODE (SET_SRC (set)) == REG
1740 && (prev = prev_nonnote_insn (insn)) != 0
1741 && (prev_set = single_set (prev)) != 0
1742 && SET_DEST (prev_set) == SET_SRC (set)
1743 && rtx_equal_p (SET_SRC (prev_set), var))))
1745 delete_insn (insn);
1747 else
1749 struct fixup_replacement *replacements = 0;
1750 rtx next_insn = NEXT_INSN (insn);
1752 if (SMALL_REGISTER_CLASSES)
1754 /* If the insn that copies the results of a CALL_INSN
1755 into a pseudo now references VAR, we have to use an
1756 intermediate pseudo since we want the life of the
1757 return value register to be only a single insn.
1759 If we don't use an intermediate pseudo, such things as
1760 address computations to make the address of VAR valid
1761 if it is not can be placed between the CALL_INSN and INSN.
1763 To make sure this doesn't happen, we record the destination
1764 of the CALL_INSN and see if the next insn uses both that
1765 and VAR. */
1767 if (call_dest != 0 && GET_CODE (insn) == INSN
1768 && reg_mentioned_p (var, PATTERN (insn))
1769 && reg_mentioned_p (call_dest, PATTERN (insn)))
1771 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1773 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1775 PATTERN (insn) = replace_rtx (PATTERN (insn),
1776 call_dest, temp);
1779 if (GET_CODE (insn) == CALL_INSN
1780 && GET_CODE (PATTERN (insn)) == SET)
1781 call_dest = SET_DEST (PATTERN (insn));
1782 else if (GET_CODE (insn) == CALL_INSN
1783 && GET_CODE (PATTERN (insn)) == PARALLEL
1784 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1785 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1786 else
1787 call_dest = 0;
1790 /* See if we have to do anything to INSN now that VAR is in
1791 memory. If it needs to be loaded into a pseudo, use a single
1792 pseudo for the entire insn in case there is a MATCH_DUP
1793 between two operands. We pass a pointer to the head of
1794 a list of struct fixup_replacements. If fixup_var_refs_1
1795 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1796 it will record them in this list.
1798 If it allocated a pseudo for any replacement, we copy into
1799 it here. */
1801 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1802 &replacements);
1804 /* If this is last_parm_insn, and any instructions were output
1805 after it to fix it up, then we must set last_parm_insn to
1806 the last such instruction emitted. */
1807 if (insn == last_parm_insn)
1808 last_parm_insn = PREV_INSN (next_insn);
1810 while (replacements)
1812 struct fixup_replacement *next;
1814 if (GET_CODE (replacements->new) == REG)
1816 rtx insert_before;
1817 rtx seq;
1819 /* OLD might be a (subreg (mem)). */
1820 if (GET_CODE (replacements->old) == SUBREG)
1821 replacements->old
1822 = fixup_memory_subreg (replacements->old, insn, 0);
1823 else
1824 replacements->old
1825 = fixup_stack_1 (replacements->old, insn);
1827 insert_before = insn;
1829 /* If we are changing the mode, do a conversion.
1830 This might be wasteful, but combine.c will
1831 eliminate much of the waste. */
1833 if (GET_MODE (replacements->new)
1834 != GET_MODE (replacements->old))
1836 start_sequence ();
1837 convert_move (replacements->new,
1838 replacements->old, unsignedp);
1839 seq = gen_sequence ();
1840 end_sequence ();
1842 else
1843 seq = gen_move_insn (replacements->new,
1844 replacements->old);
1846 emit_insn_before (seq, insert_before);
1849 next = replacements->next;
1850 free (replacements);
1851 replacements = next;
1855 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1856 But don't touch other insns referred to by reg-notes;
1857 we will get them elsewhere. */
1858 while (note)
1860 if (GET_CODE (note) != INSN_LIST)
1861 XEXP (note, 0)
1862 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1863 note = XEXP (note, 1);
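/* Illustration (a hypothetical helper, not part of the compiler):
   the loop above uses the standard idiom for walking an insn's note
   chain through the XEXP (note, 1) links.  The same skeleton can,
   for example, count the notes of a given kind.  */
#if 0
static int
count_reg_notes (insn, kind)
     rtx insn;
     enum reg_note kind;
{
  rtx note;
  int count = 0;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == kind)
      count++;

  return count;
}
#endif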
1867 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1868 See if the rtx expression at *LOC in INSN needs to be changed.
1870 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1871 contain a list of original rtx's and replacements. If we find that we need
1872 to modify this insn by replacing a memory reference with a pseudo or by
1873 making a new MEM to implement a SUBREG, we consult that list to see if
1874 we have already chosen a replacement. If none has already been allocated,
1875 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1876 or the SUBREG, as appropriate, to the pseudo. */
1878 static void
1879 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1880 rtx var;
1881 enum machine_mode promoted_mode;
1882 rtx *loc;
1883 rtx insn;
1884 struct fixup_replacement **replacements;
1886 int i;
1887 rtx x = *loc;
1888 RTX_CODE code = GET_CODE (x);
1889 const char *fmt;
1890 rtx tem, tem1;
1891 struct fixup_replacement *replacement;
1893 switch (code)
1895 case ADDRESSOF:
1896 if (XEXP (x, 0) == var)
1898 /* Prevent sharing of rtl that might lose. */
1899 rtx sub = copy_rtx (XEXP (var, 0));
1901 if (! validate_change (insn, loc, sub, 0))
1903 rtx y = gen_reg_rtx (GET_MODE (sub));
1904 rtx seq, new_insn;
1906 /* We should be able to replace with a register or all is lost.
1907 Note that we can't use validate_change to verify this, since
1908 it doesn't take care of replacing all duplicates simultaneously. */
1909 if (! validate_replace_rtx (*loc, y, insn))
1910 abort ();
1912 /* Careful! First try to recognize a direct move of the
1913 value, mimicking how things are done in gen_reload wrt
1914 PLUS. Consider what happens when insn is a conditional
1915 move instruction and addsi3 clobbers flags. */
1917 start_sequence ();
1918 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1919 seq = gen_sequence ();
1920 end_sequence ();
1922 if (recog_memoized (new_insn) < 0)
1924 /* That failed. Fall back on force_operand and hope. */
1926 start_sequence ();
1927 sub = force_operand (sub, y);
1928 if (sub != y)
1929 emit_insn (gen_move_insn (y, sub));
1930 seq = gen_sequence ();
1931 end_sequence ();
1934 #ifdef HAVE_cc0
1935 /* Don't separate setter from user. */
1936 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1937 insn = PREV_INSN (insn);
1938 #endif
1940 emit_insn_before (seq, insn);
1943 return;
1945 case MEM:
1946 if (var == x)
1948 /* If we already have a replacement, use it. Otherwise,
1949 try to fix up this address in case it is invalid. */
1951 replacement = find_fixup_replacement (replacements, var);
1952 if (replacement->new)
1954 *loc = replacement->new;
1955 return;
1958 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1960 /* Unless we are forcing memory to register or we changed the mode,
1961 we can leave things the way they are if the insn is valid. */
1963 INSN_CODE (insn) = -1;
1964 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1965 && recog_memoized (insn) >= 0)
1966 return;
1968 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1969 return;
1972 /* If X contains VAR, we need to unshare it here so that we update
1973 each occurrence separately. But all identical MEMs in one insn
1974 must be replaced with the same rtx because of the possibility of
1975 MATCH_DUPs. */
1977 if (reg_mentioned_p (var, x))
1979 replacement = find_fixup_replacement (replacements, x);
1980 if (replacement->new == 0)
1981 replacement->new = copy_most_rtx (x, var);
1983 *loc = x = replacement->new;
1984 code = GET_CODE (x);
1986 break;
1988 case REG:
1989 case CC0:
1990 case PC:
1991 case CONST_INT:
1992 case CONST:
1993 case SYMBOL_REF:
1994 case LABEL_REF:
1995 case CONST_DOUBLE:
1996 return;
1998 case SIGN_EXTRACT:
1999 case ZERO_EXTRACT:
2000 /* Note that in some cases those types of expressions are altered
2001 by optimize_bit_field, and do not survive to get here. */
2002 if (XEXP (x, 0) == var
2003 || (GET_CODE (XEXP (x, 0)) == SUBREG
2004 && SUBREG_REG (XEXP (x, 0)) == var))
2006 /* Get TEM as a valid MEM in the mode presently in the insn.
2008 We don't worry about the possibility of MATCH_DUP here; it
2009 is highly unlikely and would be tricky to handle. */
2011 tem = XEXP (x, 0);
2012 if (GET_CODE (tem) == SUBREG)
2014 if (GET_MODE_BITSIZE (GET_MODE (tem))
2015 > GET_MODE_BITSIZE (GET_MODE (var)))
2017 replacement = find_fixup_replacement (replacements, var);
2018 if (replacement->new == 0)
2019 replacement->new = gen_reg_rtx (GET_MODE (var));
2020 SUBREG_REG (tem) = replacement->new;
2022 /* The following code works only if we have a MEM, so we
2023 need to handle the subreg here. We directly substitute
2024 it assuming that a subreg must be OK here. We already
2025 scheduled a replacement to copy the mem into the
2026 subreg. */
2027 XEXP (x, 0) = tem;
2028 return;
2030 else
2031 tem = fixup_memory_subreg (tem, insn, 0);
2033 else
2034 tem = fixup_stack_1 (tem, insn);
2036 /* Unless we want to load from memory, get TEM into the proper mode
2037 for an extract from memory. This can only be done if the
2038 extract is at a constant position and length. */
2040 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2041 && GET_CODE (XEXP (x, 2)) == CONST_INT
2042 && ! mode_dependent_address_p (XEXP (tem, 0))
2043 && ! MEM_VOLATILE_P (tem))
2045 enum machine_mode wanted_mode = VOIDmode;
2046 enum machine_mode is_mode = GET_MODE (tem);
2047 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2049 if (GET_CODE (x) == ZERO_EXTRACT)
2051 enum machine_mode new_mode
2052 = mode_for_extraction (EP_extzv, 1);
2053 if (new_mode != MAX_MACHINE_MODE)
2054 wanted_mode = new_mode;
2056 else if (GET_CODE (x) == SIGN_EXTRACT)
2058 enum machine_mode new_mode
2059 = mode_for_extraction (EP_extv, 1);
2060 if (new_mode != MAX_MACHINE_MODE)
2061 wanted_mode = new_mode;
2064 /* If we have a narrower mode, we can do something. */
2065 if (wanted_mode != VOIDmode
2066 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2068 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2069 rtx old_pos = XEXP (x, 2);
2070 rtx newmem;
2072 /* If the bytes and bits are counted differently, we
2073 must adjust the offset. */
2074 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2075 offset = (GET_MODE_SIZE (is_mode)
2076 - GET_MODE_SIZE (wanted_mode) - offset);
2078 pos %= GET_MODE_BITSIZE (wanted_mode);
2080 newmem = adjust_address_nv (tem, wanted_mode, offset);
2082 /* Make the change and see if the insn remains valid. */
2083 INSN_CODE (insn) = -1;
2084 XEXP (x, 0) = newmem;
2085 XEXP (x, 2) = GEN_INT (pos);
2087 if (recog_memoized (insn) >= 0)
2088 return;
2090 /* Otherwise, restore old position. XEXP (x, 0) will be
2091 restored later. */
2092 XEXP (x, 2) = old_pos;
2096 /* If we get here, the bitfield extract insn can't accept a memory
2097 reference. Copy the input into a register. */
2099 tem1 = gen_reg_rtx (GET_MODE (tem));
2100 emit_insn_before (gen_move_insn (tem1, tem), insn);
2101 XEXP (x, 0) = tem1;
2102 return;
2104 break;
2106 case SUBREG:
2107 if (SUBREG_REG (x) == var)
2109 /* If this is a special SUBREG made because VAR was promoted
2110 from a wider mode, replace it with VAR and call ourself
2111 recursively, this time saying that the object previously
2112 had its current mode (by virtue of the SUBREG). */
2114 if (SUBREG_PROMOTED_VAR_P (x))
2116 *loc = var;
2117 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2118 return;
2121 /* If this SUBREG makes VAR wider, it has become a paradoxical
2122 SUBREG with VAR in memory, but these aren't allowed at this
2123 stage of the compilation. So load VAR into a pseudo and take
2124 a SUBREG of that pseudo. */
2125 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2127 replacement = find_fixup_replacement (replacements, var);
2128 if (replacement->new == 0)
2129 replacement->new = gen_reg_rtx (promoted_mode);
2130 SUBREG_REG (x) = replacement->new;
2131 return;
2134 /* See if we have already found a replacement for this SUBREG.
2135 If so, use it. Otherwise, make a MEM and see if the insn
2136 is recognized. If not, or if we should force MEM into a register,
2137 make a pseudo for this SUBREG. */
2138 replacement = find_fixup_replacement (replacements, x);
2139 if (replacement->new)
2141 *loc = replacement->new;
2142 return;
2145 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2147 INSN_CODE (insn) = -1;
2148 if (! flag_force_mem && recog_memoized (insn) >= 0)
2149 return;
2151 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2152 return;
2154 break;
2156 case SET:
2157 /* First do special simplification of bit-field references. */
2158 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2159 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2160 optimize_bit_field (x, insn, 0);
2161 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2162 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2163 optimize_bit_field (x, insn, 0);
2165 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2166 into a register and then store it back out. */
2167 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2168 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2169 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2170 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2171 > GET_MODE_SIZE (GET_MODE (var))))
2173 replacement = find_fixup_replacement (replacements, var);
2174 if (replacement->new == 0)
2175 replacement->new = gen_reg_rtx (GET_MODE (var));
2177 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2178 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2181 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2182 insn into a pseudo and store the low part of the pseudo into VAR. */
2183 if (GET_CODE (SET_DEST (x)) == SUBREG
2184 && SUBREG_REG (SET_DEST (x)) == var
2185 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2186 > GET_MODE_SIZE (GET_MODE (var))))
2188 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2189 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2190 tem)),
2191 insn);
2192 break;
2196 rtx dest = SET_DEST (x);
2197 rtx src = SET_SRC (x);
2198 rtx outerdest = dest;
2200 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2201 || GET_CODE (dest) == SIGN_EXTRACT
2202 || GET_CODE (dest) == ZERO_EXTRACT)
2203 dest = XEXP (dest, 0);
2205 if (GET_CODE (src) == SUBREG)
2206 src = SUBREG_REG (src);
2208 /* If VAR does not appear at the top level of the SET
2209 just scan the lower levels of the tree. */
2211 if (src != var && dest != var)
2212 break;
2214 /* We will need to re-recognize this insn. */
2215 INSN_CODE (insn) = -1;
2217 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2218 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2220 /* Since this case will return, ensure we fixup all the
2221 operands here. */
2222 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2223 insn, replacements);
2224 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2225 insn, replacements);
2226 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2227 insn, replacements);
2229 tem = XEXP (outerdest, 0);
2231 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2232 that may appear inside a ZERO_EXTRACT.
2233 This was legitimate when the MEM was a REG. */
2234 if (GET_CODE (tem) == SUBREG
2235 && SUBREG_REG (tem) == var)
2236 tem = fixup_memory_subreg (tem, insn, 0);
2237 else
2238 tem = fixup_stack_1 (tem, insn);
2240 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2241 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2242 && ! mode_dependent_address_p (XEXP (tem, 0))
2243 && ! MEM_VOLATILE_P (tem))
2245 enum machine_mode wanted_mode;
2246 enum machine_mode is_mode = GET_MODE (tem);
2247 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2249 wanted_mode = mode_for_extraction (EP_insv, 0);
2251 /* If we have a narrower mode, we can do something. */
2252 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2254 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2255 rtx old_pos = XEXP (outerdest, 2);
2256 rtx newmem;
2258 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2259 offset = (GET_MODE_SIZE (is_mode)
2260 - GET_MODE_SIZE (wanted_mode) - offset);
2262 pos %= GET_MODE_BITSIZE (wanted_mode);
2264 newmem = adjust_address_nv (tem, wanted_mode, offset);
2266 /* Make the change and see if the insn remains valid. */
2267 INSN_CODE (insn) = -1;
2268 XEXP (outerdest, 0) = newmem;
2269 XEXP (outerdest, 2) = GEN_INT (pos);
2271 if (recog_memoized (insn) >= 0)
2272 return;
2274 /* Otherwise, restore old position. XEXP (x, 0) will be
2275 restored later. */
2276 XEXP (outerdest, 2) = old_pos;
2280 /* If we get here, the bit-field store doesn't allow memory
2281 or isn't located at a constant position. Load the value into
2282 a register, do the store, and put it back into memory. */
2284 tem1 = gen_reg_rtx (GET_MODE (tem));
2285 emit_insn_before (gen_move_insn (tem1, tem), insn);
2286 emit_insn_after (gen_move_insn (tem, tem1), insn);
2287 XEXP (outerdest, 0) = tem1;
2288 return;
2291 /* STRICT_LOW_PART is a no-op on memory references
2292 and it can cause combinations to be unrecognizable,
2293 so eliminate it. */
2295 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2296 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2298 /* A valid insn to copy VAR into or out of a register
2299 must be left alone, to avoid an infinite loop here.
2300 If the reference to VAR is by a subreg, fix that up,
2301 since SUBREG is not valid for a memref.
2302 Also fix up the address of the stack slot.
2304 Note that we must not try to recognize the insn until
2305 after we know that we have valid addresses and no
2306 (subreg (mem ...) ...) constructs, since these interfere
2307 with determining the validity of the insn. */
2309 if ((SET_SRC (x) == var
2310 || (GET_CODE (SET_SRC (x)) == SUBREG
2311 && SUBREG_REG (SET_SRC (x)) == var))
2312 && (GET_CODE (SET_DEST (x)) == REG
2313 || (GET_CODE (SET_DEST (x)) == SUBREG
2314 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2315 && GET_MODE (var) == promoted_mode
2316 && x == single_set (insn))
2318 rtx pat, last;
2320 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2321 if (replacement->new)
2322 SET_SRC (x) = replacement->new;
2323 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2324 SET_SRC (x) = replacement->new
2325 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2326 else
2327 SET_SRC (x) = replacement->new
2328 = fixup_stack_1 (SET_SRC (x), insn);
2330 if (recog_memoized (insn) >= 0)
2331 return;
2333 /* INSN is not valid, but we know that we want to
2334 copy SET_SRC (x) to SET_DEST (x) in some way. So
2335 we generate the move and see whether it requires more
2336 than one insn. If it does, we emit those insns and
2337 delete INSN. Otherwise, we can just replace the pattern
2338 of INSN; we have already verified above that INSN has
2339 no other function than to do X. */
2341 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2342 if (GET_CODE (pat) == SEQUENCE)
2344 last = emit_insn_before (pat, insn);
2346 /* INSN might have REG_RETVAL or other important notes, so
2347 we need to store the pattern of the last insn in the
2348 sequence into INSN similarly to the normal case. LAST
2349 should not have REG_NOTES, but we allow them if INSN has
2350 no REG_NOTES. */
2351 if (REG_NOTES (last) && REG_NOTES (insn))
2352 abort ();
2353 if (REG_NOTES (last))
2354 REG_NOTES (insn) = REG_NOTES (last);
2355 PATTERN (insn) = PATTERN (last);
2357 delete_insn (last);
2359 else
2360 PATTERN (insn) = pat;
2362 return;
2365 if ((SET_DEST (x) == var
2366 || (GET_CODE (SET_DEST (x)) == SUBREG
2367 && SUBREG_REG (SET_DEST (x)) == var))
2368 && (GET_CODE (SET_SRC (x)) == REG
2369 || (GET_CODE (SET_SRC (x)) == SUBREG
2370 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2371 && GET_MODE (var) == promoted_mode
2372 && x == single_set (insn))
2374 rtx pat, last;
2376 if (GET_CODE (SET_DEST (x)) == SUBREG)
2377 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2378 else
2379 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2381 if (recog_memoized (insn) >= 0)
2382 return;
2384 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2385 if (GET_CODE (pat) == SEQUENCE)
2387 last = emit_insn_before (pat, insn);
2389 /* INSN might have REG_RETVAL or other important notes, so
2390 we need to store the pattern of the last insn in the
2391 sequence into INSN similarly to the normal case. LAST
2392 should not have REG_NOTES, but we allow them if INSN has
2393 no REG_NOTES. */
2394 if (REG_NOTES (last) && REG_NOTES (insn))
2395 abort ();
2396 if (REG_NOTES (last))
2397 REG_NOTES (insn) = REG_NOTES (last);
2398 PATTERN (insn) = PATTERN (last);
2400 delete_insn (last);
2402 else
2403 PATTERN (insn) = pat;
2405 return;
2408 /* Otherwise, storing into VAR must be handled specially
2409 by storing into a temporary and copying that into VAR
2410 with a new insn after this one. Note that this case
2411 will be used when storing into a promoted scalar since
2412 the insn will now have different modes on the input
2413 and output and hence will be invalid (except for the case
2414 of setting it to a constant, which does not need any
2415 change if it is valid). We generate extra code in that case,
2416 but combine.c will eliminate it. */
2418 if (dest == var)
2420 rtx temp;
2421 rtx fixeddest = SET_DEST (x);
2423 /* STRICT_LOW_PART can be discarded around a MEM. */
2424 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2425 fixeddest = XEXP (fixeddest, 0);
2426 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2427 if (GET_CODE (fixeddest) == SUBREG)
2429 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2430 promoted_mode = GET_MODE (fixeddest);
2432 else
2433 fixeddest = fixup_stack_1 (fixeddest, insn);
2435 temp = gen_reg_rtx (promoted_mode);
2437 emit_insn_after (gen_move_insn (fixeddest,
2438 gen_lowpart (GET_MODE (fixeddest),
2439 temp)),
2440 insn);
2442 SET_DEST (x) = temp;
2446 default:
2447 break;
2450 /* Nothing special about this RTX; fix its operands. */
2452 fmt = GET_RTX_FORMAT (code);
2453 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2455 if (fmt[i] == 'e')
2456 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2457 else if (fmt[i] == 'E')
2459 int j;
2460 for (j = 0; j < XVECLEN (x, i); j++)
2461 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2462 insn, replacements);
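/* Illustration (a hypothetical helper, not part of the compiler):
   the 'e'/'E' loop that ends fixup_var_refs_1 above is the generic
   recursion pattern used throughout this file.  A minimal occurrence
   counter built on the same skeleton looks like this.  */
#if 0
static int
count_rtx_occurrences (x, y)
     rtx x, y;
{
  int i, j, n = 0;
  const char *fmt;

  if (x == 0)
    return 0;
  if (x == y)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	n += count_rtx_occurrences (XEXP (x, i), y);
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    n += count_rtx_occurrences (XVECEXP (x, i, j), y);
	}
    }
  return n;
}
#endif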
2467 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2468 return an rtx (MEM:m1 newaddr) which is equivalent.
2469 If any insns must be emitted to compute NEWADDR, put them before INSN.
2471 UNCRITICAL nonzero means accept paradoxical subregs.
2472 This is used for subregs found inside REG_NOTES. */
2474 static rtx
2475 fixup_memory_subreg (x, insn, uncritical)
2476 rtx x;
2477 rtx insn;
2478 int uncritical;
2480 int offset = SUBREG_BYTE (x);
2481 rtx addr = XEXP (SUBREG_REG (x), 0);
2482 enum machine_mode mode = GET_MODE (x);
2483 rtx result;
2485 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2486 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2487 && ! uncritical)
2488 abort ();
2490 if (!flag_force_addr
2491 && memory_address_p (mode, plus_constant (addr, offset)))
2492 /* Shortcut if no insns need be emitted. */
2493 return adjust_address (SUBREG_REG (x), mode, offset);
2495 start_sequence ();
2496 result = adjust_address (SUBREG_REG (x), mode, offset);
2497 emit_insn_before (gen_sequence (), insn);
2498 end_sequence ();
2499 return result;
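/* Worked example (sketch): given
     (subreg:QI (mem:SI (reg:SI base)) 3)
   the function above returns
     (mem:QI (plus:SI (reg:SI base) (const_int 3)))
   emitting any insns needed to legitimize the displaced address
   before INSN; when that address is already valid, the whole job is
   the single adjust_address call on the shortcut path.  */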
2502 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2503 Replace subexpressions of X in place.
2504 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2505 Otherwise return X, with its contents possibly altered.
2507 If any insns must be emitted to compute NEWADDR, put them before INSN.
2509 UNCRITICAL is as in fixup_memory_subreg. */
2511 static rtx
2512 walk_fixup_memory_subreg (x, insn, uncritical)
2513 rtx x;
2514 rtx insn;
2515 int uncritical;
2517 enum rtx_code code;
2518 const char *fmt;
2519 int i;
2521 if (x == 0)
2522 return 0;
2524 code = GET_CODE (x);
2526 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2527 return fixup_memory_subreg (x, insn, uncritical);
2529 /* Nothing special about this RTX; fix its operands. */
2531 fmt = GET_RTX_FORMAT (code);
2532 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2534 if (fmt[i] == 'e')
2535 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2536 else if (fmt[i] == 'E')
2538 int j;
2539 for (j = 0; j < XVECLEN (x, i); j++)
2540 XVECEXP (x, i, j)
2541 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2544 return x;
2547 /* For each memory ref within X, if it refers to a stack slot
2548 with an out of range displacement, put the address in a temp register
2549 (emitting new insns before INSN to load these registers)
2550 and alter the memory ref to use that register.
2551 Replace each such MEM rtx with a copy, to avoid clobberage. */
2553 static rtx
2554 fixup_stack_1 (x, insn)
2555 rtx x;
2556 rtx insn;
2558 int i;
2559 RTX_CODE code = GET_CODE (x);
2560 const char *fmt;
2562 if (code == MEM)
2564 rtx ad = XEXP (x, 0);
2565 /* If we have address of a stack slot but it's not valid
2566 (displacement is too large), compute the sum in a register. */
2567 if (GET_CODE (ad) == PLUS
2568 && GET_CODE (XEXP (ad, 0)) == REG
2569 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2570 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2571 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2572 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2573 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2574 #endif
2575 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2576 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2577 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2578 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2580 rtx temp, seq;
2581 if (memory_address_p (GET_MODE (x), ad))
2582 return x;
2584 start_sequence ();
2585 temp = copy_to_reg (ad);
2586 seq = gen_sequence ();
2587 end_sequence ();
2588 emit_insn_before (seq, insn);
2589 return replace_equiv_address (x, temp);
2591 return x;
2594 fmt = GET_RTX_FORMAT (code);
2595 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2597 if (fmt[i] == 'e')
2598 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2599 else if (fmt[i] == 'E')
2601 int j;
2602 for (j = 0; j < XVECLEN (x, i); j++)
2603 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2606 return x;
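/* Illustration (a hypothetical helper, not part of the compiler):
   the fix applied above, in isolation.  AD is assumed to be a stack
   address already found invalid for X's mode, e.g. because its
   displacement exceeds the machine's addressing range.  */
#if 0
static rtx
force_address_into_reg (x, ad, insn)
     rtx x, ad, insn;
{
  rtx temp, seq;

  start_sequence ();
  temp = copy_to_reg (ad);	/* Compute the sum in a fresh pseudo.  */
  seq = gen_sequence ();
  end_sequence ();

  emit_insn_before (seq, insn);	/* Address computation precedes INSN.  */
  return replace_equiv_address (x, temp);
}
#endif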
2609 /* Optimization: a bit-field instruction whose field
2610 happens to be a byte or halfword in memory
2611 can be changed to a move instruction.
2613 We call here when INSN is an insn to examine or store into a bit-field.
2614 BODY is the SET-rtx to be altered.
2616 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2617 (Currently this is called only from function.c, and EQUIV_MEM
2618 is always 0.) */
2620 static void
2621 optimize_bit_field (body, insn, equiv_mem)
2622 rtx body;
2623 rtx insn;
2624 rtx *equiv_mem;
2626 rtx bitfield;
2627 int destflag;
2628 rtx seq = 0;
2629 enum machine_mode mode;
2631 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2632 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2633 bitfield = SET_DEST (body), destflag = 1;
2634 else
2635 bitfield = SET_SRC (body), destflag = 0;
2637 /* First check that the field being stored has constant size and position
2638 and is in fact a byte or halfword suitably aligned. */
2640 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2641 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2642 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2643 != BLKmode)
2644 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2646 rtx memref = 0;
2648 /* Now check that the containing word is memory, not a register,
2649 and that it is safe to change the machine mode. */
2651 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2652 memref = XEXP (bitfield, 0);
2653 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2654 && equiv_mem != 0)
2655 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2656 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2657 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2658 memref = SUBREG_REG (XEXP (bitfield, 0));
2659 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2660 && equiv_mem != 0
2661 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2662 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2664 if (memref
2665 && ! mode_dependent_address_p (XEXP (memref, 0))
2666 && ! MEM_VOLATILE_P (memref))
2668 /* Now adjust the address, first for any subreg'ing
2669 that we are now getting rid of,
2670 and then for which byte of the word is wanted. */
2672 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2673 rtx insns;
2675 /* Adjust OFFSET to count bits from low-address byte. */
2676 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2677 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2678 - offset - INTVAL (XEXP (bitfield, 1)));
2680 /* Adjust OFFSET to count bytes from low-address byte. */
2681 offset /= BITS_PER_UNIT;
2682 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2684 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2685 / UNITS_PER_WORD) * UNITS_PER_WORD;
2686 if (BYTES_BIG_ENDIAN)
2687 offset -= (MIN (UNITS_PER_WORD,
2688 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2689 - MIN (UNITS_PER_WORD,
2690 GET_MODE_SIZE (GET_MODE (memref))));
2693 start_sequence ();
2694 memref = adjust_address (memref, mode, offset);
2695 insns = get_insns ();
2696 end_sequence ();
2697 emit_insns_before (insns, insn);
2699 /* Store this memory reference where
2700 we found the bit field reference. */
2702 if (destflag)
2704 validate_change (insn, &SET_DEST (body), memref, 1);
2705 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2707 rtx src = SET_SRC (body);
2708 while (GET_CODE (src) == SUBREG
2709 && SUBREG_BYTE (src) == 0)
2710 src = SUBREG_REG (src);
2711 if (GET_MODE (src) != GET_MODE (memref))
2712 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2713 validate_change (insn, &SET_SRC (body), src, 1);
2715 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2716 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2717 /* This shouldn't happen because anything that didn't have
2718 one of these modes should have got converted explicitly
2719 and then referenced through a subreg.
2720 This is so because the original bit-field was
2721 handled by agg_mode and so its tree structure had
2722 the same mode that memref now has. */
2723 abort ();
2725 else
2727 rtx dest = SET_DEST (body);
2729 while (GET_CODE (dest) == SUBREG
2730 && SUBREG_BYTE (dest) == 0
2731 && (GET_MODE_CLASS (GET_MODE (dest))
2732 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2733 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2734 <= UNITS_PER_WORD))
2735 dest = SUBREG_REG (dest);
2737 validate_change (insn, &SET_DEST (body), dest, 1);
2739 if (GET_MODE (dest) == GET_MODE (memref))
2740 validate_change (insn, &SET_SRC (body), memref, 1);
2741 else
2743 /* Convert the mem ref to the destination mode. */
2744 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2746 start_sequence ();
2747 convert_move (newreg, memref,
2748 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2749 seq = get_insns ();
2750 end_sequence ();
2752 validate_change (insn, &SET_SRC (body), newreg, 1);
2756 /* See if we can convert this extraction or insertion into
2757 a simple move insn. We might not be able to do so if this
2758 was, for example, part of a PARALLEL.
2760 If we succeed, write out any needed conversions. If we fail,
2761 it is hard to guess why we failed, so don't do anything
2762 special; just let the optimization be suppressed. */
2764 if (apply_change_group () && seq)
2765 emit_insns_before (seq, insn);
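/* Worked example (sketch): on a little-endian target, a store such as
     (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8)) src)
   names an 8-bit field that starts 8 bits in, so mode_for_size above
   yields QImode, the address is advanced by one byte, and the whole
   insn becomes a plain QImode move of the low part of SRC -- provided
   apply_change_group finds the rewritten insn recognizable.  */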
2770 /* These routines are responsible for converting virtual register references
2771 to the actual hard register references once RTL generation is complete.
2773 The following five variables are used for communication between the
2774 routines. They contain the offsets of the virtual registers from their
2775 respective hard registers. */
2777 static int in_arg_offset;
2778 static int var_offset;
2779 static int dynamic_offset;
2780 static int out_arg_offset;
2781 static int cfa_offset;
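/* Summary (illustrative): instantiate_new_reg, defined later in this
   file, applies the offsets above as the fixed mapping
     virtual_incoming_args_rtx -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx    -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx -> stack_pointer_rtx + out_arg_offset
     virtual_cfa_rtx           -> arg_pointer_rtx   + cfa_offset  */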
2783 /* In most machines, the stack pointer register is equivalent to the bottom
2784 of the stack. */
2786 #ifndef STACK_POINTER_OFFSET
2787 #define STACK_POINTER_OFFSET 0
2788 #endif
2790 /* If not defined, pick an appropriate default for the offset of dynamically
2791 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2792 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2794 #ifndef STACK_DYNAMIC_OFFSET
2796 /* The bottom of the stack points to the actual arguments. If
2797 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2798 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2799 stack space for register parameters is not pushed by the caller, but
2800 rather part of the fixed stack areas and hence not included in
2801 `current_function_outgoing_args_size'. Nevertheless, we must allow
2802 for it when allocating stack dynamic objects. */
2804 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2805 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2806 ((ACCUMULATE_OUTGOING_ARGS \
2807 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2808 + (STACK_POINTER_OFFSET))
2810 #else
2811 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2812 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2813 + (STACK_POINTER_OFFSET))
2814 #endif
2815 #endif
2817 /* On most machines, the CFA coincides with the first incoming parm. */
2819 #ifndef ARG_POINTER_CFA_OFFSET
2820 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2821 #endif
2823 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2824 its address taken. DECL is the decl for the object stored in the
2825 register, for later use if we do need to force REG into the stack.
2826 REG is overwritten by the MEM as in put_reg_into_stack. */
2828 rtx
2829 gen_mem_addressof (reg, decl)
2830 rtx reg;
2831 tree decl;
2833 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2834 REGNO (reg), decl);
2836 /* Calculate this before we start messing with decl's RTL. */
2837 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2839 /* If the original REG was a user-variable, then so is the REG whose
2840 address is being taken. Likewise for unchanging. */
2841 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2842 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2844 PUT_CODE (reg, MEM);
2845 MEM_ATTRS (reg) = 0;
2846 XEXP (reg, 0) = r;
2848 if (decl)
2850 tree type = TREE_TYPE (decl);
2851 enum machine_mode decl_mode
2852 = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
2853 : DECL_MODE (decl));
2854 rtx decl_rtl = decl ? DECL_RTL_IF_SET (decl) : 0;
2856 PUT_MODE (reg, decl_mode);
2858 /* Clear DECL_RTL momentarily so functions below will work
2859 properly, then set it again. */
2860 if (decl_rtl == reg)
2861 SET_DECL_RTL (decl, 0);
2863 set_mem_attributes (reg, decl, 1);
2864 set_mem_alias_set (reg, set);
2866 if (decl_rtl == reg)
2867 SET_DECL_RTL (decl, reg);
2869 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2870 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2872 else
2873 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2875 return reg;
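/* Example (sketch): for a pseudo (reg:SI 60) whose address is taken,
   the code above rewrites the register in place as
     (mem:SI (addressof:Pmode (reg:SI 61) 60 <decl>))
   where 61 is the fresh pseudo allocated above and 60 preserves the
   original register number for put_addressof_into_stack.  */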
2878 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2880 void
2881 flush_addressof (decl)
2882 tree decl;
2884 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2885 && DECL_RTL (decl) != 0
2886 && GET_CODE (DECL_RTL (decl)) == MEM
2887 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2888 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2889 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2892 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2894 static void
2895 put_addressof_into_stack (r, ht)
2896 rtx r;
2897 struct hash_table *ht;
2899 tree decl, type;
2900 int volatile_p, used_p;
2902 rtx reg = XEXP (r, 0);
2904 if (GET_CODE (reg) != REG)
2905 abort ();
2907 decl = ADDRESSOF_DECL (r);
2908 if (decl)
2910 type = TREE_TYPE (decl);
2911 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2912 && TREE_THIS_VOLATILE (decl));
2913 used_p = (TREE_USED (decl)
2914 || (TREE_CODE (decl) != SAVE_EXPR
2915 && DECL_INITIAL (decl) != 0));
2917 else
2919 type = NULL_TREE;
2920 volatile_p = 0;
2921 used_p = 1;
2924 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2925 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2928 /* List of replacements made below in purge_addressof_1 when creating
2929 bitfield insertions. */
2930 static rtx purge_bitfield_addressof_replacements;
2932 /* List of replacements made below in purge_addressof_1 for patterns
2933 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2934 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2935 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2936 enough in complex cases, e.g. when some field values can be
2937 extracted by using a MEM with a narrower mode. */
2938 static rtx purge_addressof_replacements;
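/* Shape note (illustrative): both replacement lists are chains of
   EXPR_LIST nodes holding key/value pairs,
     (expr_list KEY (expr_list VALUE <rest-of-list>))
   so the lookup loops in purge_addressof_1 below advance two links at
   a time via XEXP (XEXP (tem, 1), 1).  */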
2940 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2941 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2942 the stack. If the function returns FALSE then the replacement could not
2943 be made. */
2945 static bool
2946 purge_addressof_1 (loc, insn, force, store, ht)
2947 rtx *loc;
2948 rtx insn;
2949 int force, store;
2950 struct hash_table *ht;
2952 rtx x;
2953 RTX_CODE code;
2954 int i, j;
2955 const char *fmt;
2956 bool result = true;
2958 /* Re-start here to avoid recursion in common cases. */
2959 restart:
2961 x = *loc;
2962 if (x == 0)
2963 return true;
2965 code = GET_CODE (x);
2967 /* If we don't return in any of the cases below, we will recurse inside
2968 the RTX, which will normally result in any ADDRESSOF being forced into
2969 memory. */
2970 if (code == SET)
2972 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2973 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2974 return result;
2976 else if (code == ADDRESSOF)
2978 rtx sub, insns;
2980 if (GET_CODE (XEXP (x, 0)) != MEM)
2982 put_addressof_into_stack (x, ht);
2983 return true;
2986 /* We must create a copy of the rtx because it was created by
2987 overwriting a REG rtx which is always shared. */
2988 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2989 if (validate_change (insn, loc, sub, 0)
2990 || validate_replace_rtx (x, sub, insn))
2991 return true;
2993 start_sequence ();
2994 sub = force_operand (sub, NULL_RTX);
2995 if (! validate_change (insn, loc, sub, 0)
2996 && ! validate_replace_rtx (x, sub, insn))
2997 abort ();
2999 insns = gen_sequence ();
3000 end_sequence ();
3001 emit_insn_before (insns, insn);
3002 return true;
3005 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3007 rtx sub = XEXP (XEXP (x, 0), 0);
3009 if (GET_CODE (sub) == MEM)
3010 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3011 else if (GET_CODE (sub) == REG
3012 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3013 ;
3014 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3016 int size_x, size_sub;
3018 if (!insn)
3020 /* When processing REG_NOTES look at the list of
3021 replacements done on the insn to find the register that X
3022 was replaced by. */
3023 rtx tem;
3025 for (tem = purge_bitfield_addressof_replacements;
3026 tem != NULL_RTX;
3027 tem = XEXP (XEXP (tem, 1), 1))
3028 if (rtx_equal_p (x, XEXP (tem, 0)))
3030 *loc = XEXP (XEXP (tem, 1), 0);
3031 return true;
3034 /* See comment for purge_addressof_replacements. */
3035 for (tem = purge_addressof_replacements;
3036 tem != NULL_RTX;
3037 tem = XEXP (XEXP (tem, 1), 1))
3038 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3040 rtx z = XEXP (XEXP (tem, 1), 0);
3042 if (GET_MODE (x) == GET_MODE (z)
3043 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3044 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3045 abort ();
3047 /* It can happen that the note may speak of things
3048 in a wider (or just different) mode than the
3049 code did. This is especially true of
3050 REG_RETVAL. */
3052 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3053 z = SUBREG_REG (z);
3055 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3056 && (GET_MODE_SIZE (GET_MODE (x))
3057 > GET_MODE_SIZE (GET_MODE (z))))
3059 /* This can occur as a result of invalid
3060 pointer casts, e.g. float f; ...
3061 *(long long int *)&f.
3062 ??? We could emit a warning here, but
3063 without a line number that wouldn't be
3064 very helpful. */
3065 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3067 else
3068 z = gen_lowpart (GET_MODE (x), z);
3070 *loc = z;
3071 return true;
3074 /* Sometimes we may not be able to find the replacement. For
3075 example when the original insn was a MEM in a wider mode,
3076 and the note is part of a sign extension of a narrowed
3077 version of that MEM. Gcc testcase compile/990829-1.c can
3078 generate an example of this situation. Rather than complain
3079 we return false, which will prompt our caller to remove the
3080 offending note. */
3081 return false;
3084 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3085 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3087 /* Don't even consider working with paradoxical subregs,
3088 or the moral equivalent seen here. */
3089 if (size_x <= size_sub
3090 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3092 /* Do a bitfield insertion to mirror what would happen
3093 in memory. */
3095 rtx val, seq;
3097 if (store)
3099 rtx p = PREV_INSN (insn);
3101 start_sequence ();
3102 val = gen_reg_rtx (GET_MODE (x));
3103 if (! validate_change (insn, loc, val, 0))
3105 /* Discard the current sequence and put the
3106 ADDRESSOF on stack. */
3107 end_sequence ();
3108 goto give_up;
3110 seq = gen_sequence ();
3111 end_sequence ();
3112 emit_insn_before (seq, insn);
3113 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3114 insn, ht);
3116 start_sequence ();
3117 store_bit_field (sub, size_x, 0, GET_MODE (x),
3118 val, GET_MODE_SIZE (GET_MODE (sub)));
3120 /* Make sure to unshare any shared rtl that store_bit_field
3121 might have created. */
3122 unshare_all_rtl_again (get_insns ());
3124 seq = gen_sequence ();
3125 end_sequence ();
3126 p = emit_insn_after (seq, insn);
3127 if (NEXT_INSN (insn))
3128 compute_insns_for_mem (NEXT_INSN (insn),
3129 p ? NEXT_INSN (p) : NULL_RTX,
3130 ht);
3132 else
3134 rtx p = PREV_INSN (insn);
3136 start_sequence ();
3137 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3138 GET_MODE (x), GET_MODE (x),
3139 GET_MODE_SIZE (GET_MODE (sub)));
3141 if (! validate_change (insn, loc, val, 0))
3143 /* Discard the current sequence and put the
3144 ADDRESSOF on stack. */
3145 end_sequence ();
3146 goto give_up;
3149 seq = gen_sequence ();
3150 end_sequence ();
3151 emit_insn_before (seq, insn);
3152 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3153 insn, ht);
3156 /* Remember the replacement so that the same one can be done
3157 on the REG_NOTES. */
3158 purge_bitfield_addressof_replacements
3159 = gen_rtx_EXPR_LIST (VOIDmode, x,
3160 gen_rtx_EXPR_LIST
3161 (VOIDmode, val,
3162 purge_bitfield_addressof_replacements));
3164 /* We replaced with a reg -- all done. */
3165 return true;
3169 else if (validate_change (insn, loc, sub, 0))
3171 /* Remember the replacement so that the same one can be done
3172 on the REG_NOTES. */
3173 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3175 rtx tem;
3177 for (tem = purge_addressof_replacements;
3178 tem != NULL_RTX;
3179 tem = XEXP (XEXP (tem, 1), 1))
3180 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3182 XEXP (XEXP (tem, 1), 0) = sub;
3183 return true;
3185 purge_addressof_replacements
3186 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3187 gen_rtx_EXPR_LIST (VOIDmode, sub,
3188 purge_addressof_replacements));
3189 return true;
3191 goto restart;
3195 give_up:
3196 /* Scan all subexpressions. */
3197 fmt = GET_RTX_FORMAT (code);
3198 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3200 if (*fmt == 'e')
3201 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3202 else if (*fmt == 'E')
3203 for (j = 0; j < XVECLEN (x, i); j++)
3204 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3207 return result;
3210 /* Return a new hash table entry in HT. */
3212 static struct hash_entry *
3213 insns_for_mem_newfunc (he, ht, k)
3214 struct hash_entry *he;
3215 struct hash_table *ht;
3216 hash_table_key k ATTRIBUTE_UNUSED;
3218 struct insns_for_mem_entry *ifmhe;
3219 if (he)
3220 return he;
3222 ifmhe = ((struct insns_for_mem_entry *)
3223 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3224 ifmhe->insns = NULL_RTX;
3226 return &ifmhe->he;
3229 /* Return a hash value for K, a REG. */
3231 static unsigned long
3232 insns_for_mem_hash (k)
3233 hash_table_key k;
3235 /* K is really an RTX. Just use the address as the hash value. */
3236 return (unsigned long) k;
3239 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3241 static bool
3242 insns_for_mem_comp (k1, k2)
3243 hash_table_key k1;
3244 hash_table_key k2;
3246 return k1 == k2;
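/* Usage sketch (mirroring purge_addressof below): the table maps each
   REG rtx, by pointer identity, to the list of insns mentioning it.  */
#if 0
  {
    struct hash_table ht;

    hash_table_init (&ht, insns_for_mem_newfunc,
		     insns_for_mem_hash, insns_for_mem_comp);
    compute_insns_for_mem (get_insns (), NULL_RTX, &ht);
    /* ... entries are then consulted via hash_lookup (&ht, reg, 0, 0) ... */
    hash_table_free (&ht);
  }
#endif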
3249 struct insns_for_mem_walk_info
3251 /* The hash table that we are using to record which INSNs use which
3252 MEMs. */
3253 struct hash_table *ht;
3255 /* The INSN we are currently processing. */
3256 rtx insn;
3258 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3259 to find the insns that use the REGs in the ADDRESSOFs. */
3260 int pass;
3263 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3264 that might be used in an ADDRESSOF expression, record this INSN in
3265 the hash table given by DATA (which is really a pointer to an
3266 insns_for_mem_walk_info structure). */
3268 static int
3269 insns_for_mem_walk (r, data)
3270 rtx *r;
3271 void *data;
3273 struct insns_for_mem_walk_info *ifmwi
3274 = (struct insns_for_mem_walk_info *) data;
3276 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3277 && GET_CODE (XEXP (*r, 0)) == REG)
3278 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3279 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3281 /* Look up this REG in the hashtable (without creating an entry). */
3282 struct insns_for_mem_entry *ifme
3283 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3284 *r,
3285 /*create=*/0,
3286 /*copy=*/0);
3288 /* If we have not already recorded this INSN, do so now. Since
3289 we process the INSNs in order, we know that if we have
3290 recorded it, it must be at the front of the list. */
3291 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3292 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3293 ifme->insns);
3296 return 0;
3299 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3300 which REGs in HT. */
3302 static void
3303 compute_insns_for_mem (insns, last_insn, ht)
3304 rtx insns;
3305 rtx last_insn;
3306 struct hash_table *ht;
3308 rtx insn;
3309 struct insns_for_mem_walk_info ifmwi;
3310 ifmwi.ht = ht;
3312 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3313 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3314 if (INSN_P (insn))
3316 ifmwi.insn = insn;
3317 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3321 /* Helper function for purge_addressof called through for_each_rtx.
3322 Returns true iff the rtl is an ADDRESSOF. */
3324 static int
3325 is_addressof (rtl, data)
3326 rtx *rtl;
3327 void *data ATTRIBUTE_UNUSED;
3329 return GET_CODE (*rtl) == ADDRESSOF;
3332 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3333 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3334 stack. */
3336 void
3337 purge_addressof (insns)
3338 rtx insns;
3340 rtx insn;
3341 struct hash_table ht;
3343 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3344 requires a fixup pass over the instruction stream to correct
3345 INSNs that depended on the REG being a REG, and not a MEM. But,
3346 these fixup passes are slow. Furthermore, most MEMs are not
3347 mentioned in very many instructions. So, we speed up the process
3348 by pre-calculating which REGs occur in which INSNs; that allows
3349 us to perform the fixup passes much more quickly. */
3350 hash_table_init (&ht,
3351 insns_for_mem_newfunc,
3352 insns_for_mem_hash,
3353 insns_for_mem_comp);
3354 compute_insns_for_mem (insns, NULL_RTX, &ht);
3356 for (insn = insns; insn; insn = NEXT_INSN (insn))
3357 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3358 || GET_CODE (insn) == CALL_INSN)
3360 if (! purge_addressof_1 (&PATTERN (insn), insn,
3361 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3362 /* If we could not replace the ADDRESSOFs in the insn,
3363 something is wrong. */
3364 abort ();
3366 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3368 /* If we could not replace the ADDRESSOFs in the insn's notes,
3369 we can just remove the offending notes instead. */
3370 rtx note;
3372 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3374 /* If we find a REG_RETVAL note then the insn is a libcall.
3375 Such insns must have REG_EQUAL notes as well, in order
3376 for later passes of the compiler to work. So it is not
3377 safe to delete the notes here, and instead we abort. */
3378 if (REG_NOTE_KIND (note) == REG_RETVAL)
3379 abort ();
3380 if (for_each_rtx (&note, is_addressof, NULL))
3381 remove_note (insn, note);
3386 /* Clean up. */
3387 hash_table_free (&ht);
3388 purge_bitfield_addressof_replacements = 0;
3389 purge_addressof_replacements = 0;
3391 /* REGs are shared. purge_addressof will destructively replace a REG
3392 with a MEM, which creates shared MEMs.
3394 Unfortunately, the children of put_reg_into_stack assume that MEMs
3395 referring to the same stack slot are shared (fixup_var_refs and
3396 the associated hash table code).
3398 So, we have to do another unsharing pass after we have flushed any
3399 REGs that had their address taken into the stack.
3401 It may be worth tracking whether or not we converted any REGs into
3402 MEMs to avoid this overhead when it is not needed. */
3403 unshare_all_rtl_again (get_insns ());
3406 /* Convert a SET of a hard subreg to a set of the appropriate hard
3407 register. A subroutine of purge_hard_subreg_sets. */
3409 static void
3410 purge_single_hard_subreg_set (pattern)
3411 rtx pattern;
3413 rtx reg = SET_DEST (pattern);
3414 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3415 int offset = 0;
3417 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3418 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3420 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3421 GET_MODE (SUBREG_REG (reg)),
3422 SUBREG_BYTE (reg),
3423 GET_MODE (reg));
3424 reg = SUBREG_REG (reg);
3428 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3430 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3431 SET_DEST (pattern) = reg;
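/* Example (sketch): on a little-endian 32-bit target where (reg:DI 0)
   occupies hard registers 0 and 1,
     (set (subreg:SI (reg:DI 0) 4) (reg:SI 60))
   has a subreg_regno_offset of 1, so the function above rewrites the
   destination as the plain (reg:SI 1).  */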
3435 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3436 only such SETs that we expect to see are those left in because
3437 integrate can't handle sets of parts of a return value register.
3439 We don't use alter_subreg because we only want to eliminate subregs
3440 of hard registers. */
3442 void
3443 purge_hard_subreg_sets (insn)
3444 rtx insn;
3446 for (; insn; insn = NEXT_INSN (insn))
3448 if (INSN_P (insn))
3450 rtx pattern = PATTERN (insn);
3451 switch (GET_CODE (pattern))
3453 case SET:
3454 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3455 purge_single_hard_subreg_set (pattern);
3456 break;
3457 case PARALLEL:
3459 int j;
3460 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3462 rtx inner_pattern = XVECEXP (pattern, 0, j);
3463 if (GET_CODE (inner_pattern) == SET
3464 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3465 purge_single_hard_subreg_set (inner_pattern);
3468 break;
3469 default:
3470 break;
3476 /* Pass through the INSNS of function FNDECL and convert virtual register
3477 references to hard register references. */
3479 void
3480 instantiate_virtual_regs (fndecl, insns)
3481 tree fndecl;
3482 rtx insns;
3484 rtx insn;
3485 unsigned int i;
3487 /* Compute the offsets to use for this function. */
3488 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3489 var_offset = STARTING_FRAME_OFFSET;
3490 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3491 out_arg_offset = STACK_POINTER_OFFSET;
3492 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3494 /* Scan all variables and parameters of this function. For each that is
3495 in memory, instantiate all virtual registers if the result is a valid
3496 address. If not, we do it later. That will handle most uses of virtual
3497 regs on many machines. */
3498 instantiate_decls (fndecl, 1);
3500 /* Initialize recognition, indicating that volatile is OK. */
3501 init_recog ();
3503 /* Scan through all the insns, instantiating every virtual register still
3504 present. */
3505 for (insn = insns; insn; insn = NEXT_INSN (insn))
3506 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3507 || GET_CODE (insn) == CALL_INSN)
3509 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3510 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3511 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3512 if (GET_CODE (insn) == CALL_INSN)
3513 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3514 NULL_RTX, 0);
3517 /* Instantiate the stack slots for the parm registers, for later use in
3518 addressof elimination. */
3519 for (i = 0; i < max_parm_reg; ++i)
3520 if (parm_reg_stack_loc[i])
3521 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3523 /* Now instantiate the remaining register equivalences for debugging info.
3524 These will not be valid addresses. */
3525 instantiate_decls (fndecl, 0);
3527 /* Indicate that, from now on, assign_stack_local should use
3528 frame_pointer_rtx. */
3529 virtuals_instantiated = 1;
3532 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3533 all virtual registers in their DECL_RTL's.
3535 If VALID_ONLY, do this only if the resulting address is still valid.
3536 Otherwise, always do it. */
3538 static void
3539 instantiate_decls (fndecl, valid_only)
3540 tree fndecl;
3541 int valid_only;
3543 tree decl;
3545 /* Process all parameters of the function. */
3546 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3548 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3549 HOST_WIDE_INT size_rtl;
3551 instantiate_decl (DECL_RTL (decl), size, valid_only);
3553 /* If the parameter was promoted, then the incoming RTL mode may be
3554 larger than the declared type size. We must use the larger of
3555 the two sizes. */
3556 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3557 size = MAX (size_rtl, size);
3558 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3561 /* Now process all variables defined in the function or its subblocks. */
3562 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3565 /* Subroutine of instantiate_decls: Process all decls in the given
3566 BLOCK node and all its subblocks. */
3568 static void
3569 instantiate_decls_1 (let, valid_only)
3570 tree let;
3571 int valid_only;
3573 tree t;
3575 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3576 if (DECL_RTL_SET_P (t))
3577 instantiate_decl (DECL_RTL (t),
3578 int_size_in_bytes (TREE_TYPE (t)),
3579 valid_only);
3581 /* Process all subblocks. */
3582 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3583 instantiate_decls_1 (t, valid_only);
3586 /* Subroutine of the preceding procedures: Given RTL representing a
3587 decl and the size of the object, do any instantiation required.
3589 If VALID_ONLY is non-zero, it means that the RTL should only be
3590 changed if the new address is valid. */
3592 static void
3593 instantiate_decl (x, size, valid_only)
3594 rtx x;
3595 HOST_WIDE_INT size;
3596 int valid_only;
3598 enum machine_mode mode;
3599 rtx addr;
3601 /* If this is not a MEM, no need to do anything. Similarly if the
3602 address is a constant or a register that is not a virtual register. */
3604 if (x == 0 || GET_CODE (x) != MEM)
3605 return;
3607 addr = XEXP (x, 0);
3608 if (CONSTANT_P (addr)
3609 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3610 || (GET_CODE (addr) == REG
3611 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3612 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3613 return;
3615 /* If we should only do this if the address is valid, copy the address.
3616 We need to do this so we can undo any changes that might make the
3617 address invalid. This copy is unfortunate, but probably can't be
3618 avoided. */
3620 if (valid_only)
3621 addr = copy_rtx (addr);
3623 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3625 if (valid_only && size >= 0)
3627 unsigned HOST_WIDE_INT decl_size = size;
3629 /* Now verify that the resulting address is valid for every integer or
3630 floating-point mode up to and including SIZE bytes long. We do this
3631 since the object might be accessed in any mode and frame addresses
3632 are shared. */
3634 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3635 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3636 mode = GET_MODE_WIDER_MODE (mode))
3637 if (! memory_address_p (mode, addr))
3638 return;
3640 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3641 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3642 mode = GET_MODE_WIDER_MODE (mode))
3643 if (! memory_address_p (mode, addr))
3644 return;
3647 /* Put back the address now that we have updated it and we either know
3648 it is valid or we don't care whether it is valid. */
3650 XEXP (x, 0) = addr;
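/* Example (sketch): for a 16-byte DECL, the loops above probe the
   instantiated address with QImode, HImode, SImode, DImode and TImode,
   then with SFmode, DFmode and any wider float modes no larger than
   16 bytes, because frame addresses are shared and the slot may later
   be accessed in any of those modes.  */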
3653 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3654 is a virtual register, return the equivalent hard register and set the
3655 offset indirectly through the pointer. Otherwise, return 0. */
3657 static rtx
3658 instantiate_new_reg (x, poffset)
3659 rtx x;
3660 HOST_WIDE_INT *poffset;
3662 rtx new;
3663 HOST_WIDE_INT offset;
3665 if (x == virtual_incoming_args_rtx)
3666 new = arg_pointer_rtx, offset = in_arg_offset;
3667 else if (x == virtual_stack_vars_rtx)
3668 new = frame_pointer_rtx, offset = var_offset;
3669 else if (x == virtual_stack_dynamic_rtx)
3670 new = stack_pointer_rtx, offset = dynamic_offset;
3671 else if (x == virtual_outgoing_args_rtx)
3672 new = stack_pointer_rtx, offset = out_arg_offset;
3673 else if (x == virtual_cfa_rtx)
3674 new = arg_pointer_rtx, offset = cfa_offset;
3675 else
3676 return 0;
3678 *poffset = offset;
3679 return new;
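/* Usage sketch (hypothetical caller, with ADDR a local): fold the
   returned offset into an address with plus_constant, much as the
   code below does.  */
#if 0
  {
    HOST_WIDE_INT off;
    rtx addr;
    rtx base = instantiate_new_reg (virtual_stack_vars_rtx, &off);

    if (base != 0)
      addr = plus_constant (base, off);	/* e.g. (plus fp var_offset) */
  }
#endif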
3682 /* Given a pointer to a piece of rtx and an optional pointer to the
3683 containing object, instantiate any virtual registers present in it.
3685 If EXTRA_INSNS, we always do the replacement and generate
3686 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3687 is not valid.
3689 Return 1 if we either had nothing to do or if we were able to do the
3690 needed replacement. Return 0 otherwise; we only return zero if
3691 EXTRA_INSNS is zero.
3693 We first try some simple transformations to avoid the creation of extra
3694 pseudos. */
3696 static int
3697 instantiate_virtual_regs_1 (loc, object, extra_insns)
3698 rtx *loc;
3699 rtx object;
3700 int extra_insns;
3702 rtx x;
3703 RTX_CODE code;
3704 rtx new = 0;
3705 HOST_WIDE_INT offset = 0;
3706 rtx temp;
3707 rtx seq;
3708 int i, j;
3709 const char *fmt;
3711 /* Re-start here to avoid recursion in common cases. */
3712 restart:
3714 x = *loc;
3715 if (x == 0)
3716 return 1;
3718 code = GET_CODE (x);
3720 /* Check for some special cases. */
3721 switch (code)
3723 case CONST_INT:
3724 case CONST_DOUBLE:
3725 case CONST:
3726 case SYMBOL_REF:
3727 case CODE_LABEL:
3728 case PC:
3729 case CC0:
3730 case ASM_INPUT:
3731 case ADDR_VEC:
3732 case ADDR_DIFF_VEC:
3733 case RETURN:
3734 return 1;
3736 case SET:
3737 /* We are allowed to set the virtual registers. This means that
3738 the actual register should receive the source minus the
3739 appropriate offset. This is used, for example, in the handling
3740 of non-local gotos. */
3741 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3743 rtx src = SET_SRC (x);
3745 /* We are setting the register, not using it, so the relevant
3746 offset is the negative of the offset to use were we using
3747 the register. */
3748 offset = - offset;
3749 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3751 /* The only valid sources here are PLUS or REG. Just do
3752 the simplest possible thing to handle them. */
3753 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3754 abort ();
3756 start_sequence ();
3757 if (GET_CODE (src) != REG)
3758 temp = force_operand (src, NULL_RTX);
3759 else
3760 temp = src;
3761 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3762 seq = get_insns ();
3763 end_sequence ();
3765 emit_insns_before (seq, object);
3766 SET_DEST (x) = new;
3768 if (! validate_change (object, &SET_SRC (x), temp, 0)
3769 || ! extra_insns)
3770 abort ();
3772 return 1;
3775 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3776 loc = &SET_SRC (x);
3777 goto restart;
3779 case PLUS:
3780 /* Handle special case of virtual register plus constant. */
3781 if (CONSTANT_P (XEXP (x, 1)))
3783 rtx old, new_offset;
3785 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3786 if (GET_CODE (XEXP (x, 0)) == PLUS)
3788 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3790 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3791 extra_insns);
3792 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3794 else
3796 loc = &XEXP (x, 0);
3797 goto restart;
3801 #ifdef POINTERS_EXTEND_UNSIGNED
3802 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3803 we can commute the PLUS and SUBREG because pointers into the
3804 frame are well-behaved. */
3805 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3806 && GET_CODE (XEXP (x, 1)) == CONST_INT
3807 && 0 != (new
3808 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3809 &offset))
3810 && validate_change (object, loc,
3811 plus_constant (gen_lowpart (ptr_mode,
3812 new),
3813 offset
3814 + INTVAL (XEXP (x, 1))),
3815 0))
3816 return 1;
3817 #endif
3818 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3820 /* We know the second operand is a constant. Unless the
3821 first operand is a REG (which has already been checked),
3822 it needs to be checked. */
3823 if (GET_CODE (XEXP (x, 0)) != REG)
3825 loc = &XEXP (x, 0);
3826 goto restart;
3828 return 1;
3831 new_offset = plus_constant (XEXP (x, 1), offset);
3833 /* If the new constant is zero, try to replace the sum with just
3834 the register. */
3835 if (new_offset == const0_rtx
3836 && validate_change (object, loc, new, 0))
3837 return 1;
3839 /* Next try to replace the register and new offset.
3840 There are two changes to validate here and we can't assume that
3841 when the old offset equals the new one, just changing the register
3842 will yield a valid insn. In the interests of a little efficiency,
3843 however, we only call validate_change once (we don't queue up the
3844 changes and then call apply_change_group). */
3846 old = XEXP (x, 0);
3847 if (offset == 0
3848 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3849 : (XEXP (x, 0) = new,
3850 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3852 if (! extra_insns)
3854 XEXP (x, 0) = old;
3855 return 0;
3858 /* Otherwise copy the new constant into a register and replace
3859 the constant with that register. */
3860 temp = gen_reg_rtx (Pmode);
3861 XEXP (x, 0) = new;
3862 if (validate_change (object, &XEXP (x, 1), temp, 0))
3863 emit_insn_before (gen_move_insn (temp, new_offset), object);
3864 else
3866 /* If that didn't work, replace this expression with a
3867 register containing the sum. */
3869 XEXP (x, 0) = old;
3870 new = gen_rtx_PLUS (Pmode, new, new_offset);
3872 start_sequence ();
3873 temp = force_operand (new, NULL_RTX);
3874 seq = get_insns ();
3875 end_sequence ();
3877 emit_insns_before (seq, object);
3878 if (! validate_change (object, loc, temp, 0)
3879 && ! validate_replace_rtx (x, temp, object))
3880 abort ();
3884 return 1;
3887 /* Fall through to generic two-operand expression case. */
3888 case EXPR_LIST:
3889 case CALL:
3890 case COMPARE:
3891 case MINUS:
3892 case MULT:
3893 case DIV: case UDIV:
3894 case MOD: case UMOD:
3895 case AND: case IOR: case XOR:
3896 case ROTATERT: case ROTATE:
3897 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3898 case NE: case EQ:
3899 case GE: case GT: case GEU: case GTU:
3900 case LE: case LT: case LEU: case LTU:
3901 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3902 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3903 loc = &XEXP (x, 0);
3904 goto restart;
3906 case MEM:
3907 /* Most cases of MEM that convert to valid addresses have already been
3908 handled by our scan of decls. The only special handling we
3909 need here is to make a copy of the rtx to ensure it isn't being
3910 shared if we have to change it to a pseudo.
3912 If the rtx is a simple reference to an address via a virtual register,
3913 it can potentially be shared. In such cases, first try to make it
3914 a valid address, which can also be shared. Otherwise, copy it and
3915 proceed normally.
3917 First check for common cases that need no processing. These are
3918 usually due to instantiation already being done on a previous instance
3919 of a shared rtx. */
3921 temp = XEXP (x, 0);
3922 if (CONSTANT_ADDRESS_P (temp)
3923 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3924 || temp == arg_pointer_rtx
3925 #endif
3926 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3927 || temp == hard_frame_pointer_rtx
3928 #endif
3929 || temp == frame_pointer_rtx)
3930 return 1;
3932 if (GET_CODE (temp) == PLUS
3933 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3934 && (XEXP (temp, 0) == frame_pointer_rtx
3935 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3936 || XEXP (temp, 0) == hard_frame_pointer_rtx
3937 #endif
3938 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3939 || XEXP (temp, 0) == arg_pointer_rtx
3940 #endif
3941 ))
3942 return 1;
3944 if (temp == virtual_stack_vars_rtx
3945 || temp == virtual_incoming_args_rtx
3946 || (GET_CODE (temp) == PLUS
3947 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3948 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3949 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3951 /* This MEM may be shared. If the substitution can be done without
3952 the need to generate new pseudos, we want to do it in place
3953 so all copies of the shared rtx benefit. The call below will
3954 only make substitutions if the resulting address is still
3955 valid.
3957 Note that we cannot pass X as the object in the recursive call
3958 since the insn being processed may not allow all valid
3959 addresses. However, if we were not passed an object, we can
3960 only modify X without copying it if X will have a valid
3961 address.
3963 ??? Also note that this can still lose if OBJECT is an insn that
3964 has fewer restrictions on an address than some other insn.
3965 In that case, we will modify the shared address. This case
3966 doesn't seem very likely, though. One case where this could
3967 happen is in the case of a USE or CLOBBER reference, but we
3968 take care of that below. */
3970 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3971 object ? object : x, 0))
3972 return 1;
3974 /* Otherwise make a copy and process that copy. We copy the entire
3975 RTL expression since it might be a PLUS which could also be
3976 shared. */
3977 *loc = x = copy_rtx (x);
3980 /* Fall through to generic unary operation case. */
3981 case SUBREG:
3982 case STRICT_LOW_PART:
3983 case NEG: case NOT:
3984 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3985 case SIGN_EXTEND: case ZERO_EXTEND:
3986 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3987 case FLOAT: case FIX:
3988 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3989 case ABS:
3990 case SQRT:
3991 case FFS:
3992 /* These cases either have just one operand or we know that we need not
3993 check the rest of the operands. */
3994 loc = &XEXP (x, 0);
3995 goto restart;
3997 case USE:
3998 case CLOBBER:
3999 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4000 go ahead and make the invalid change, but do it to a copy. For a REG,
4001 just make the recursive call, since there's no chance of a problem. */
4003 if ((GET_CODE (XEXP (x, 0)) == MEM
4004 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4005 0))
4006 || (GET_CODE (XEXP (x, 0)) == REG
4007 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4008 return 1;
4010 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4011 loc = &XEXP (x, 0);
4012 goto restart;
4014 case REG:
4015 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4016 in front of this insn and substitute the temporary. */
4017 if ((new = instantiate_new_reg (x, &offset)) != 0)
4019 temp = plus_constant (new, offset);
4020 if (!validate_change (object, loc, temp, 0))
4022 if (! extra_insns)
4023 return 0;
4025 start_sequence ();
4026 temp = force_operand (temp, NULL_RTX);
4027 seq = get_insns ();
4028 end_sequence ();
4030 emit_insns_before (seq, object);
4031 if (! validate_change (object, loc, temp, 0)
4032 && ! validate_replace_rtx (x, temp, object))
4033 abort ();
4037 return 1;
4039 case ADDRESSOF:
4040 if (GET_CODE (XEXP (x, 0)) == REG)
4041 return 1;
4043 else if (GET_CODE (XEXP (x, 0)) == MEM)
4045 /* If we have an (addressof (mem ..)), do any instantiation inside
4046 since we know we'll be making the inside valid when we finally
4047 remove the ADDRESSOF. */
4048 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4049 return 1;
4051 break;
4053 default:
4054 break;
4057 /* Scan all subexpressions. */
4058 fmt = GET_RTX_FORMAT (code);
4059 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4060 if (*fmt == 'e')
4062 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4063 return 0;
4065 else if (*fmt == 'E')
4066 for (j = 0; j < XVECLEN (x, i); j++)
4067 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4068 extra_insns))
4069 return 0;
4071 return 1;
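/* Editorial aside, not part of function.c: the `goto restart' above is
   hand-done tail-recursion elimination; we recurse fully on all but one
   operand and iterate on the last.  The same shape on a toy binary
   node, compiled out; the type is hypothetical.  */
#if 0
struct toy_node { struct toy_node *op0, *op1; int leaf; };

static int
toy_walk (struct toy_node *n)
{
 restart:
  if (n == 0 || n->leaf)
    return 1;
  if (! toy_walk (n->op1))	/* true recursion on one operand */
    return 0;
  n = n->op0;			/* iterate on the other, as via `loc' */
  goto restart;
}
#endif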
4074 /* Optimization: assuming this function does not receive nonlocal gotos,
4075 delete the handlers for such, as well as the insns to establish
4076 and disestablish them. */
4078 static void
4079 delete_handlers ()
4081 rtx insn;
4082 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4084 /* Delete the handler by turning off the flag that would
4085 prevent jump_optimize from deleting it.
4086 Also permit deletion of the nonlocal labels themselves
4087 if nothing local refers to them. */
4088 if (GET_CODE (insn) == CODE_LABEL)
4090 tree t, last_t;
4092 LABEL_PRESERVE_P (insn) = 0;
4094 /* Remove it from the nonlocal_label list, to avoid confusing
4095 flow. */
4096 for (t = nonlocal_labels, last_t = 0; t;
4097 last_t = t, t = TREE_CHAIN (t))
4098 if (DECL_RTL (TREE_VALUE (t)) == insn)
4099 break;
4100 if (t)
4102 if (! last_t)
4103 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4104 else
4105 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4108 if (GET_CODE (insn) == INSN)
4110 int can_delete = 0;
4111 rtx t;
4112 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4113 if (reg_mentioned_p (t, PATTERN (insn)))
4115 can_delete = 1;
4116 break;
4118 if (can_delete
4119 || (nonlocal_goto_stack_level != 0
4120 && reg_mentioned_p (nonlocal_goto_stack_level,
4121 PATTERN (insn))))
4122 delete_related_insns (insn);
4127 int
4128 max_parm_reg_num ()
4130 return max_parm_reg;
4133 /* Return the first insn following those generated by `assign_parms'. */
4135 rtx
4136 get_first_nonparm_insn ()
4138 if (last_parm_insn)
4139 return NEXT_INSN (last_parm_insn);
4140 return get_insns ();
4143 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4144 Crash if there is none. */
4146 rtx
4147 get_first_block_beg ()
4149 rtx searcher;
4150 rtx insn = get_first_nonparm_insn ();
4152 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4153 if (GET_CODE (searcher) == NOTE
4154 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4155 return searcher;
4157 abort (); /* Invalid call to this function. (See comments above.) */
4158 return NULL_RTX;
4161 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4162 This means a type for which function calls must pass an address to the
4163 function or get an address back from the function.
4164 EXP may be a type node or an expression (whose type is tested). */
4166 int
4167 aggregate_value_p (exp)
4168 tree exp;
4170 int i, regno, nregs;
4171 rtx reg;
4173 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4175 if (TREE_CODE (type) == VOID_TYPE)
4176 return 0;
4177 if (RETURN_IN_MEMORY (type))
4178 return 1;
4179 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4180 and thus can't be returned in registers. */
4181 if (TREE_ADDRESSABLE (type))
4182 return 1;
4183 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4184 return 1;
4185 /* Make sure we have suitable call-clobbered regs to return
4186 the value in; if not, we must return it in memory. */
4187 reg = hard_function_value (type, 0, 0);
4189 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4190 it is OK. */
4191 if (GET_CODE (reg) != REG)
4192 return 0;
4194 regno = REGNO (reg);
4195 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4196 for (i = 0; i < nregs; i++)
4197 if (! call_used_regs[regno + i])
4198 return 1;
4199 return 0;
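/* Editorial aside, not part of function.c: the closing loop above asks
   whether every hard register the value would occupy is call-clobbered.
   The same test over a plain flag array, compiled out; the names are
   hypothetical.  */
#if 0
static int
toy_return_in_memory (const char *call_used, int regno, int nregs)
{
  int i;

  for (i = 0; i < nregs; i++)
    if (! call_used[regno + i])
      return 1;			/* a call-saved reg: return in memory */
  return 0;
}
#endif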
4202 /* Assign RTL expressions to the function's parameters.
4203 This may involve copying them into registers and using
4204 those registers as the RTL for them. */
4206 void
4207 assign_parms (fndecl)
4208 tree fndecl;
4210 tree parm;
4211 rtx entry_parm = 0;
4212 rtx stack_parm = 0;
4213 CUMULATIVE_ARGS args_so_far;
4214 enum machine_mode promoted_mode, passed_mode;
4215 enum machine_mode nominal_mode, promoted_nominal_mode;
4216 int unsignedp;
4217 /* Total space needed so far for args on the stack,
4218 given as a constant and a tree-expression. */
4219 struct args_size stack_args_size;
4220 tree fntype = TREE_TYPE (fndecl);
4221 tree fnargs = DECL_ARGUMENTS (fndecl);
4222 /* This is used for the arg pointer when referring to stack args. */
4223 rtx internal_arg_pointer;
4224 /* This is a dummy PARM_DECL that we use for the function result if
4225 the function returns a structure. */
4226 tree function_result_decl = 0;
4227 #ifdef SETUP_INCOMING_VARARGS
4228 int varargs_setup = 0;
4229 #endif
4230 rtx conversion_insns = 0;
4231 struct args_size alignment_pad;
4233 /* Nonzero if the last arg is named `__builtin_va_alist',
4234 which is used on some machines for old-fashioned non-ANSI varargs.h;
4235 this should be stuck onto the stack as if it had arrived there. */
4236 int hide_last_arg
4237 = (current_function_varargs
4238 && fnargs
4239 && (parm = tree_last (fnargs)) != 0
4240 && DECL_NAME (parm)
4241 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4242 "__builtin_va_alist")));
4244 /* Nonzero if function takes extra anonymous args.
4245 This means the last named arg must be on the stack
4246 right before the anonymous ones. */
4247 int stdarg
4248 = (TYPE_ARG_TYPES (fntype) != 0
4249 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4250 != void_type_node));
4252 current_function_stdarg = stdarg;
4254 /* If the reg that the virtual arg pointer will be translated into is
4255 not a fixed reg or is the stack pointer, make a copy of the virtual
4256 arg pointer, and address parms via the copy. The frame pointer is
4257 considered fixed even though it is not marked as such.
4259 The second time through, simply use ap to avoid generating rtx. */
4261 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4262 || ! (fixed_regs[ARG_POINTER_REGNUM]
4263 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4264 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4265 else
4266 internal_arg_pointer = virtual_incoming_args_rtx;
4267 current_function_internal_arg_pointer = internal_arg_pointer;
4269 stack_args_size.constant = 0;
4270 stack_args_size.var = 0;
4272 /* If struct value address is treated as the first argument, make it so. */
4273 if (aggregate_value_p (DECL_RESULT (fndecl))
4274 && ! current_function_returns_pcc_struct
4275 && struct_value_incoming_rtx == 0)
4277 tree type = build_pointer_type (TREE_TYPE (fntype));
4279 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4281 DECL_ARG_TYPE (function_result_decl) = type;
4282 TREE_CHAIN (function_result_decl) = fnargs;
4283 fnargs = function_result_decl;
4286 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4287 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4289 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4290 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4291 #else
4292 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4293 #endif
4295 /* We haven't yet found an argument that we must push and pretend the
4296 caller did. */
4297 current_function_pretend_args_size = 0;
4299 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4301 struct args_size stack_offset;
4302 struct args_size arg_size;
4303 int passed_pointer = 0;
4304 int did_conversion = 0;
4305 tree passed_type = DECL_ARG_TYPE (parm);
4306 tree nominal_type = TREE_TYPE (parm);
4307 int pretend_named;
4309 /* Set LAST_NAMED if this is the last named arg before some
4310 anonymous args. */
4311 int last_named = ((TREE_CHAIN (parm) == 0
4312 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4313 && (stdarg || current_function_varargs));
4314 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4315 most machines, if this is a varargs/stdarg function, then we treat
4316 the last named arg as if it were anonymous too. */
4317 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4319 if (TREE_TYPE (parm) == error_mark_node
4320 /* This can happen after weird syntax errors
4321 or if an enum type is defined among the parms. */
4322 || TREE_CODE (parm) != PARM_DECL
4323 || passed_type == NULL)
4325 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4326 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4327 TREE_USED (parm) = 1;
4328 continue;
4331 /* For a varargs.h function, save info about regs and stack space
4332 used by the individual args, not including the va_alist arg. */
4333 if (hide_last_arg && last_named)
4334 current_function_args_info = args_so_far;
4336 /* Find mode of arg as it is passed, and mode of arg
4337 as it should be during execution of this function. */
4338 passed_mode = TYPE_MODE (passed_type);
4339 nominal_mode = TYPE_MODE (nominal_type);
4341 /* If the parm's mode is VOID, its value doesn't matter,
4342 so avoid the usual things like emit_move_insn that could crash. */
4343 if (nominal_mode == VOIDmode)
4345 SET_DECL_RTL (parm, const0_rtx);
4346 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4347 continue;
4350 /* If the parm is to be passed as a transparent union, use the
4351 type of the first field for the tests below. We have already
4352 verified that the modes are the same. */
4353 if (DECL_TRANSPARENT_UNION (parm)
4354 || (TREE_CODE (passed_type) == UNION_TYPE
4355 && TYPE_TRANSPARENT_UNION (passed_type)))
4356 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4358 /* See if this arg was passed by invisible reference. It is if
4359 it is an object whose size depends on the contents of the
4360 object itself or if the machine requires these objects be passed
4361 that way. */
4363 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4364 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4365 || TREE_ADDRESSABLE (passed_type)
4366 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4367 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4368 passed_type, named_arg)
4369 #endif
4370 )
4372 passed_type = nominal_type = build_pointer_type (passed_type);
4373 passed_pointer = 1;
4374 passed_mode = nominal_mode = Pmode;
4377 promoted_mode = passed_mode;
4379 #ifdef PROMOTE_FUNCTION_ARGS
4380 /* Compute the mode in which the arg is actually extended to. */
4381 unsignedp = TREE_UNSIGNED (passed_type);
4382 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4383 #endif
4385 /* Let machine desc say which reg (if any) the parm arrives in.
4386 0 means it arrives on the stack. */
4387 #ifdef FUNCTION_INCOMING_ARG
4388 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4389 passed_type, named_arg);
4390 #else
4391 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4392 passed_type, named_arg);
4393 #endif
4395 if (entry_parm == 0)
4396 promoted_mode = passed_mode;
4398 #ifdef SETUP_INCOMING_VARARGS
4399 /* If this is the last named parameter, do any required setup for
4400 varargs or stdargs. We need to know about the case of this being an
4401 addressable type, in which case we skip the registers it
4402 would have arrived in.
4404 For stdargs, LAST_NAMED will be set for two parameters, the one that
4405 is actually the last named, and the dummy parameter. We only
4406 want to do this action once.
4408 Also, indicate when RTL generation is to be suppressed. */
4409 if (last_named && !varargs_setup)
4411 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4412 current_function_pretend_args_size, 0);
4413 varargs_setup = 1;
4415 #endif
4417 /* Determine parm's home in the stack,
4418 in case it arrives in the stack or we should pretend it did.
4420 Compute the stack position and rtx where the argument arrives
4421 and its size.
4423 There is one complexity here: If this was a parameter that would
4424 have been passed in registers, but wasn't only because it is
4425 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4426 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4427 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4428 0 as it was the previous time. */
4430 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4431 locate_and_pad_parm (promoted_mode, passed_type,
4432 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4433 1,
4434 #else
4435 #ifdef FUNCTION_INCOMING_ARG
4436 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4437 passed_type,
4438 pretend_named) != 0,
4439 #else
4440 FUNCTION_ARG (args_so_far, promoted_mode,
4441 passed_type,
4442 pretend_named) != 0,
4443 #endif
4444 #endif
4445 fndecl, &stack_args_size, &stack_offset, &arg_size,
4446 &alignment_pad);
4449 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4451 if (offset_rtx == const0_rtx)
4452 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4453 else
4454 stack_parm = gen_rtx_MEM (promoted_mode,
4455 gen_rtx_PLUS (Pmode,
4456 internal_arg_pointer,
4457 offset_rtx));
4459 set_mem_attributes (stack_parm, parm, 1);
4462 /* If this parameter was passed both in registers and in the stack,
4463 use the copy on the stack. */
4464 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4465 entry_parm = 0;
4467 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4468 /* If this parm was passed part in regs and part in memory,
4469 pretend it arrived entirely in memory
4470 by pushing the register-part onto the stack.
4472 In the special case of a DImode or DFmode that is split,
4473 we could put it together in a pseudoreg directly,
4474 but for now that's not worth bothering with. */
4476 if (entry_parm)
4478 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4479 passed_type, named_arg);
4481 if (nregs > 0)
4483 current_function_pretend_args_size
4484 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4485 / (PARM_BOUNDARY / BITS_PER_UNIT)
4486 * (PARM_BOUNDARY / BITS_PER_UNIT));
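/* Editorial worked example with assumed values: if UNITS_PER_WORD is 4
   and PARM_BOUNDARY is 64 bits (8 bytes), then nregs == 3 gives
   ((3*4) + 8 - 1) / 8 * 8 == 19 / 8 * 8 == 16, i.e. the 12-byte
   register part rounded up to the parameter boundary.  */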
4488 /* Handle calls that pass values in multiple non-contiguous
4489 locations. The Irix 6 ABI has examples of this. */
4490 if (GET_CODE (entry_parm) == PARALLEL)
4491 emit_group_store (validize_mem (stack_parm), entry_parm,
4492 int_size_in_bytes (TREE_TYPE (parm)));
4494 else
4495 move_block_from_reg (REGNO (entry_parm),
4496 validize_mem (stack_parm), nregs,
4497 int_size_in_bytes (TREE_TYPE (parm)));
4499 entry_parm = stack_parm;
4502 #endif
4504 /* If we didn't decide this parm came in a register,
4505 by default it came on the stack. */
4506 if (entry_parm == 0)
4507 entry_parm = stack_parm;
4509 /* Record permanently how this parm was passed. */
4510 DECL_INCOMING_RTL (parm) = entry_parm;
4512 /* If there is actually space on the stack for this parm,
4513 count it in stack_args_size; otherwise set stack_parm to 0
4514 to indicate there is no preallocated stack slot for the parm. */
4516 if (entry_parm == stack_parm
4517 || (GET_CODE (entry_parm) == PARALLEL
4518 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4519 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4520 /* On some machines, even if a parm value arrives in a register
4521 there is still an (uninitialized) stack slot allocated for it.
4523 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4524 whether this parameter already has a stack slot allocated,
4525 because an arg block exists only if current_function_args_size
4526 is larger than some threshold, and we haven't calculated that
4527 yet. So, for now, we just assume that stack slots never exist
4528 in this case. */
4529 || REG_PARM_STACK_SPACE (fndecl) > 0
4530 #endif
4531 )
4533 stack_args_size.constant += arg_size.constant;
4534 if (arg_size.var)
4535 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4537 else
4538 /* No stack slot was pushed for this parm. */
4539 stack_parm = 0;
4541 /* Update info on where next arg arrives in registers. */
4543 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4544 passed_type, named_arg);
4546 /* If we can't trust the parm stack slot to be aligned enough
4547 for its ultimate type, don't use that slot after entry.
4548 We'll make another stack slot, if we need one. */
4550 unsigned int thisparm_boundary
4551 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4553 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4554 stack_parm = 0;
4557 /* If parm was passed in memory, and we need to convert it on entry,
4558 don't store it back in that same slot. */
4559 if (entry_parm != 0
4560 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4561 stack_parm = 0;
4563 /* When an argument is passed in multiple locations, we can't
4564 make use of this information, but we can save some copying if
4565 the whole argument is passed in a single register. */
4566 if (GET_CODE (entry_parm) == PARALLEL
4567 && nominal_mode != BLKmode && passed_mode != BLKmode)
4569 int i, len = XVECLEN (entry_parm, 0);
4571 for (i = 0; i < len; i++)
4572 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4573 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4574 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4575 == passed_mode)
4576 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4578 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4579 DECL_INCOMING_RTL (parm) = entry_parm;
4580 break;
4584 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4585 in the mode in which it arrives.
4586 STACK_PARM is an RTX for a stack slot where the parameter can live
4587 during the function (in case we want to put it there).
4588 STACK_PARM is 0 if no stack slot was pushed for it.
4590 Now output code if necessary to convert ENTRY_PARM to
4591 the type in which this function declares it,
4592 and store that result in an appropriate place,
4593 which may be a pseudo reg, may be STACK_PARM,
4594 or may be a local stack slot if STACK_PARM is 0.
4596 Set DECL_RTL to that place. */
4598 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4600 /* If a BLKmode arrives in registers, copy it to a stack slot.
4601 Handle calls that pass values in multiple non-contiguous
4602 locations. The Irix 6 ABI has examples of this. */
4603 if (GET_CODE (entry_parm) == REG
4604 || GET_CODE (entry_parm) == PARALLEL)
4606 int size_stored
4607 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4608 UNITS_PER_WORD);
4610 /* Note that we will be storing an integral number of words.
4611 So we have to be careful to ensure that we allocate an
4612 integral number of words. We do this below in the
4613 assign_stack_local if space was not allocated in the argument
4614 list. If it was, this will not work if PARM_BOUNDARY is not
4615 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4616 if it becomes a problem. */
4618 if (stack_parm == 0)
4620 stack_parm
4621 = assign_stack_local (GET_MODE (entry_parm),
4622 size_stored, 0);
4623 set_mem_attributes (stack_parm, parm, 1);
4626 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4627 abort ();
4629 /* Handle calls that pass values in multiple non-contiguous
4630 locations. The Irix 6 ABI has examples of this. */
4631 if (GET_CODE (entry_parm) == PARALLEL)
4632 emit_group_store (validize_mem (stack_parm), entry_parm,
4633 int_size_in_bytes (TREE_TYPE (parm)));
4634 else
4635 move_block_from_reg (REGNO (entry_parm),
4636 validize_mem (stack_parm),
4637 size_stored / UNITS_PER_WORD,
4638 int_size_in_bytes (TREE_TYPE (parm)));
4640 SET_DECL_RTL (parm, stack_parm);
4642 else if (! ((! optimize
4643 && ! DECL_REGISTER (parm)
4644 && ! DECL_INLINE (fndecl))
4645 || TREE_SIDE_EFFECTS (parm)
4646 /* If -ffloat-store specified, don't put explicit
4647 float variables into registers. */
4648 || (flag_float_store
4649 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4650 /* Always assign pseudo to structure return or item passed
4651 by invisible reference. */
4652 || passed_pointer || parm == function_result_decl)
4654 /* Store the parm in a pseudoregister during the function, but we
4655 may need to do it in a wider mode. */
4657 rtx parmreg;
4658 unsigned int regno, regnoi = 0, regnor = 0;
4660 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4662 promoted_nominal_mode
4663 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4665 parmreg = gen_reg_rtx (promoted_nominal_mode);
4666 mark_user_reg (parmreg);
4668 /* If this was an item that we received a pointer to, set DECL_RTL
4669 appropriately. */
4670 if (passed_pointer)
4672 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4673 parmreg);
4674 set_mem_attributes (x, parm, 1);
4675 SET_DECL_RTL (parm, x);
4677 else
4679 SET_DECL_RTL (parm, parmreg);
4680 maybe_set_unchanging (DECL_RTL (parm), parm);
4683 /* Copy the value into the register. */
4684 if (nominal_mode != passed_mode
4685 || promoted_nominal_mode != promoted_mode)
4687 int save_tree_used;
4688 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4689 mode, by the caller. We now have to convert it to
4690 NOMINAL_MODE, if different. However, PARMREG may be in
4691 a different mode than NOMINAL_MODE if it is being stored
4692 promoted.
4694 If ENTRY_PARM is a hard register, it might be in a register
4695 not valid for operating in its mode (e.g., an odd-numbered
4696 register for a DFmode). In that case, moves are the only
4697 thing valid, so we can't do a convert from there. This
4698 occurs when the calling sequence allow such misaligned
4699 usages.
4701 In addition, the conversion may involve a call, which could
4702 clobber parameters which haven't been copied to pseudo
4703 registers yet. Therefore, we must first copy the parm to
4704 a pseudo reg here, and save the conversion until after all
4705 parameters have been moved. */
4707 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4709 emit_move_insn (tempreg, validize_mem (entry_parm));
4711 push_to_sequence (conversion_insns);
4712 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4714 if (GET_CODE (tempreg) == SUBREG
4715 && GET_MODE (tempreg) == nominal_mode
4716 && GET_CODE (SUBREG_REG (tempreg)) == REG
4717 && nominal_mode == passed_mode
4718 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4719 && GET_MODE_SIZE (GET_MODE (tempreg))
4720 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4722 /* The argument is already sign/zero extended, so note it
4723 into the subreg. */
4724 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4725 SUBREG_PROMOTED_UNSIGNED_P (tempreg) = unsignedp;
4728 /* TREE_USED gets set erroneously during expand_assignment. */
4729 save_tree_used = TREE_USED (parm);
4730 expand_assignment (parm,
4731 make_tree (nominal_type, tempreg), 0, 0);
4732 TREE_USED (parm) = save_tree_used;
4733 conversion_insns = get_insns ();
4734 did_conversion = 1;
4735 end_sequence ();
4737 else
4738 emit_move_insn (parmreg, validize_mem (entry_parm));
4740 /* If we were passed a pointer but the actual value
4741 can safely live in a register, put it in one. */
4742 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4743 && ! ((! optimize
4744 && ! DECL_REGISTER (parm)
4745 && ! DECL_INLINE (fndecl))
4746 || TREE_SIDE_EFFECTS (parm)
4747 /* If -ffloat-store specified, don't put explicit
4748 float variables into registers. */
4749 || (flag_float_store
4750 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4752 /* We can't use nominal_mode, because it will have been set to
4753 Pmode above. We must use the actual mode of the parm. */
4754 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4755 mark_user_reg (parmreg);
4756 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4758 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4759 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4760 push_to_sequence (conversion_insns);
4761 emit_move_insn (tempreg, DECL_RTL (parm));
4762 SET_DECL_RTL (parm,
4763 convert_to_mode (GET_MODE (parmreg),
4764 tempreg,
4765 unsigned_p));
4766 emit_move_insn (parmreg, DECL_RTL (parm));
4767 conversion_insns = get_insns();
4768 did_conversion = 1;
4769 end_sequence ();
4771 else
4772 emit_move_insn (parmreg, DECL_RTL (parm));
4773 SET_DECL_RTL (parm, parmreg);
4774 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4775 now the parm. */
4776 stack_parm = 0;
4778 #ifdef FUNCTION_ARG_CALLEE_COPIES
4779 /* If we are passed an arg by reference and it is our responsibility
4780 to make a copy, do it now.
4781 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4782 original argument, so we must recreate them in the call to
4783 FUNCTION_ARG_CALLEE_COPIES. */
4784 /* ??? Later add code to skip the copy in the case where the
4785 argument isn't modified. */
4787 else if (passed_pointer
4788 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4789 TYPE_MODE (DECL_ARG_TYPE (parm)),
4790 DECL_ARG_TYPE (parm),
4791 named_arg)
4792 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4794 rtx copy;
4795 tree type = DECL_ARG_TYPE (parm);
4797 /* This sequence may involve a library call that could clobber
4798 registers that haven't been copied to pseudos yet. */
4800 push_to_sequence (conversion_insns);
4802 if (!COMPLETE_TYPE_P (type)
4803 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4804 /* This is a variable sized object. */
4805 copy = gen_rtx_MEM (BLKmode,
4806 allocate_dynamic_stack_space
4807 (expr_size (parm), NULL_RTX,
4808 TYPE_ALIGN (type)));
4809 else
4810 copy = assign_stack_temp (TYPE_MODE (type),
4811 int_size_in_bytes (type), 1);
4812 set_mem_attributes (copy, parm, 1);
4814 store_expr (parm, copy, 0);
4815 emit_move_insn (parmreg, XEXP (copy, 0));
4816 if (current_function_check_memory_usage)
4817 emit_library_call (chkr_set_right_libfunc,
4818 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4819 XEXP (copy, 0), Pmode,
4820 GEN_INT (int_size_in_bytes (type)),
4821 TYPE_MODE (sizetype),
4822 GEN_INT (MEMORY_USE_RW),
4823 TYPE_MODE (integer_type_node));
4824 conversion_insns = get_insns ();
4825 did_conversion = 1;
4826 end_sequence ();
4828 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4830 /* In any case, record the parm's desired stack location
4831 in case we later discover it must live in the stack.
4833 If it is a COMPLEX value, store the stack location for both
4834 halves. */
4836 if (GET_CODE (parmreg) == CONCAT)
4837 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4838 else
4839 regno = REGNO (parmreg);
4841 if (regno >= max_parm_reg)
4843 rtx *new;
4844 int old_max_parm_reg = max_parm_reg;
4846 /* It's slow to expand this one register at a time,
4847 but it's also rare and we need max_parm_reg to be
4848 precisely correct. */
4849 max_parm_reg = regno + 1;
4850 new = (rtx *) xrealloc (parm_reg_stack_loc,
4851 max_parm_reg * sizeof (rtx));
4852 memset ((char *) (new + old_max_parm_reg), 0,
4853 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4854 parm_reg_stack_loc = new;
4857 if (GET_CODE (parmreg) == CONCAT)
4859 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4861 regnor = REGNO (gen_realpart (submode, parmreg));
4862 regnoi = REGNO (gen_imagpart (submode, parmreg));
4864 if (stack_parm != 0)
4866 parm_reg_stack_loc[regnor]
4867 = gen_realpart (submode, stack_parm);
4868 parm_reg_stack_loc[regnoi]
4869 = gen_imagpart (submode, stack_parm);
4871 else
4873 parm_reg_stack_loc[regnor] = 0;
4874 parm_reg_stack_loc[regnoi] = 0;
4877 else
4878 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4880 /* Mark the register as eliminable if we did no conversion
4881 and it was copied from memory at a fixed offset,
4882 and the arg pointer was not copied to a pseudo-reg.
4883 If the arg pointer is a pseudo reg or the offset formed
4884 an invalid address, such memory-equivalences
4885 as we make here would screw up life analysis for it. */
4886 if (nominal_mode == passed_mode
4887 && ! did_conversion
4888 && stack_parm != 0
4889 && GET_CODE (stack_parm) == MEM
4890 && stack_offset.var == 0
4891 && reg_mentioned_p (virtual_incoming_args_rtx,
4892 XEXP (stack_parm, 0)))
4894 rtx linsn = get_last_insn ();
4895 rtx sinsn, set;
4897 /* Mark complex types separately. */
4898 if (GET_CODE (parmreg) == CONCAT)
4899 /* Scan backwards for the set of the real and
4900 imaginary parts. */
4901 for (sinsn = linsn; sinsn != 0;
4902 sinsn = prev_nonnote_insn (sinsn))
4904 set = single_set (sinsn);
4905 if (set != 0
4906 && SET_DEST (set) == regno_reg_rtx [regnoi])
4907 REG_NOTES (sinsn)
4908 = gen_rtx_EXPR_LIST (REG_EQUIV,
4909 parm_reg_stack_loc[regnoi],
4910 REG_NOTES (sinsn));
4911 else if (set != 0
4912 && SET_DEST (set) == regno_reg_rtx [regnor])
4913 REG_NOTES (sinsn)
4914 = gen_rtx_EXPR_LIST (REG_EQUIV,
4915 parm_reg_stack_loc[regnor],
4916 REG_NOTES (sinsn));
4918 else if ((set = single_set (linsn)) != 0
4919 && SET_DEST (set) == parmreg)
4920 REG_NOTES (linsn)
4921 = gen_rtx_EXPR_LIST (REG_EQUIV,
4922 stack_parm, REG_NOTES (linsn));
4925 /* For pointer data type, suggest pointer register. */
4926 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4927 mark_reg_pointer (parmreg,
4928 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4930 /* If something wants our address, try to use ADDRESSOF. */
4931 if (TREE_ADDRESSABLE (parm))
4933 /* If we end up putting something into the stack,
4934 fixup_var_refs_insns will need to make a pass over
4935 all the instructions. It looks through the pending
4936 sequences -- but it can't see the ones in the
4937 CONVERSION_INSNS, if they're not on the sequence
4938 stack. So, we go back to that sequence, just so that
4939 the fixups will happen. */
4940 push_to_sequence (conversion_insns);
4941 put_var_into_stack (parm);
4942 conversion_insns = get_insns ();
4943 end_sequence ();
4946 else
4948 /* Value must be stored in the stack slot STACK_PARM
4949 during function execution. */
4951 if (promoted_mode != nominal_mode)
4953 /* Conversion is required. */
4954 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4956 emit_move_insn (tempreg, validize_mem (entry_parm));
4958 push_to_sequence (conversion_insns);
4959 entry_parm = convert_to_mode (nominal_mode, tempreg,
4960 TREE_UNSIGNED (TREE_TYPE (parm)));
4961 if (stack_parm)
4962 /* ??? This may need a big-endian conversion on sparc64. */
4963 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
4965 conversion_insns = get_insns ();
4966 did_conversion = 1;
4967 end_sequence ();
4970 if (entry_parm != stack_parm)
4972 if (stack_parm == 0)
4974 stack_parm
4975 = assign_stack_local (GET_MODE (entry_parm),
4976 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4977 set_mem_attributes (stack_parm, parm, 1);
4980 if (promoted_mode != nominal_mode)
4982 push_to_sequence (conversion_insns);
4983 emit_move_insn (validize_mem (stack_parm),
4984 validize_mem (entry_parm));
4985 conversion_insns = get_insns ();
4986 end_sequence ();
4988 else
4989 emit_move_insn (validize_mem (stack_parm),
4990 validize_mem (entry_parm));
4992 if (current_function_check_memory_usage)
4994 push_to_sequence (conversion_insns);
4995 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
4996 VOIDmode, 3, XEXP (stack_parm, 0), Pmode,
4997 GEN_INT (GET_MODE_SIZE (GET_MODE
4998 (entry_parm))),
4999 TYPE_MODE (sizetype),
5000 GEN_INT (MEMORY_USE_RW),
5001 TYPE_MODE (integer_type_node));
5003 conversion_insns = get_insns ();
5004 end_sequence ();
5006 SET_DECL_RTL (parm, stack_parm);
5009 /* If this "parameter" was the place where we are receiving the
5010 function's incoming structure pointer, set up the result. */
5011 if (parm == function_result_decl)
5013 tree result = DECL_RESULT (fndecl);
5014 rtx addr = DECL_RTL (parm);
5015 rtx x;
5017 #ifdef POINTERS_EXTEND_UNSIGNED
5018 if (GET_MODE (addr) != Pmode)
5019 addr = convert_memory_address (Pmode, addr);
5020 #endif
5022 x = gen_rtx_MEM (DECL_MODE (result), addr);
5023 set_mem_attributes (x, result, 1);
5024 SET_DECL_RTL (result, x);
5027 if (GET_CODE (DECL_RTL (parm)) == REG)
5028 REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5029 else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5031 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5032 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5037 /* Output all parameter conversion instructions (possibly including calls)
5038 now that all parameters have been copied out of hard registers. */
5039 emit_insns (conversion_insns);
5041 last_parm_insn = get_last_insn ();
5043 current_function_args_size = stack_args_size.constant;
5045 /* Adjust function incoming argument size for alignment and
5046 minimum length. */
5048 #ifdef REG_PARM_STACK_SPACE
5049 #ifndef MAYBE_REG_PARM_STACK_SPACE
5050 current_function_args_size = MAX (current_function_args_size,
5051 REG_PARM_STACK_SPACE (fndecl));
5052 #endif
5053 #endif
5055 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5057 current_function_args_size
5058 = ((current_function_args_size + STACK_BYTES - 1)
5059 / STACK_BYTES) * STACK_BYTES;
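/* Editorial worked example with assumed values: if STACK_BOUNDARY is
   128 bits, STACK_BYTES is 16 and an args size of 20 rounds up to
   ((20 + 16 - 1) / 16) * 16 == 32.  */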
5061 #ifdef ARGS_GROW_DOWNWARD
5062 current_function_arg_offset_rtx
5063 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5064 : expand_expr (size_diffop (stack_args_size.var,
5065 size_int (-stack_args_size.constant)),
5066 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5067 #else
5068 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5069 #endif
5071 /* See how many bytes, if any, of its args a function should try to pop
5072 on return. */
5074 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5075 current_function_args_size);
5077 /* For a stdarg.h function, save info about
5078 regs and stack space used by the named args. */
5080 if (!hide_last_arg)
5081 current_function_args_info = args_so_far;
5083 /* Set the rtx used for the function return value. Put this in its
5084 own variable so any optimizers that need this information don't have
5085 to include tree.h. Do this here so it gets done when an inlined
5086 function gets output. */
5088 current_function_return_rtx
5089 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5090 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5093 /* Indicate whether REGNO is an incoming argument to the current function
5094 that was promoted to a wider mode. If so, return the RTX for the
5095 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5096 that REGNO is promoted from and whether the promotion was signed or
5097 unsigned. */
5099 #ifdef PROMOTE_FUNCTION_ARGS
5101 rtx
5102 promoted_input_arg (regno, pmode, punsignedp)
5103 unsigned int regno;
5104 enum machine_mode *pmode;
5105 int *punsignedp;
5107 tree arg;
5109 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5110 arg = TREE_CHAIN (arg))
5111 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5112 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5113 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5115 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5116 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5118 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5119 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5120 && mode != DECL_MODE (arg))
5122 *pmode = DECL_MODE (arg);
5123 *punsignedp = unsignedp;
5124 return DECL_INCOMING_RTL (arg);
5128 return 0;
5131 #endif
5133 /* Compute the size and offset from the start of the stacked arguments for a
5134 parm passed in mode PASSED_MODE and with type TYPE.
5136 INITIAL_OFFSET_PTR points to the current offset into the stacked
5137 arguments.
5139 The starting offset and size for this parm are returned in *OFFSET_PTR
5140 and *ARG_SIZE_PTR, respectively.
5142 IN_REGS is non-zero if the argument will be passed in registers. It will
5143 never be set if REG_PARM_STACK_SPACE is not defined.
5145 FNDECL is the function in which the argument was defined.
5147 There are two types of rounding that are done. The first, controlled by
5148 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5149 list to be aligned to the specific boundary (in bits). This rounding
5150 affects the initial and starting offsets, but not the argument size.
5152 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5153 optionally rounds the size of the parm to PARM_BOUNDARY. The
5154 initial offset is not affected by this rounding, while the size always
5155 is and the starting offset may be. */
5157 /* In the ARGS_GROW_DOWNWARD case, offset_ptr will be negative;
5158 initial_offset_ptr is positive because locate_and_pad_parm's
5159 callers pass in the total size of args so far as
5160 initial_offset_ptr. arg_size_ptr is always positive. */
5162 void
5163 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5164 initial_offset_ptr, offset_ptr, arg_size_ptr,
5165 alignment_pad)
5166 enum machine_mode passed_mode;
5167 tree type;
5168 int in_regs ATTRIBUTE_UNUSED;
5169 tree fndecl ATTRIBUTE_UNUSED;
5170 struct args_size *initial_offset_ptr;
5171 struct args_size *offset_ptr;
5172 struct args_size *arg_size_ptr;
5173 struct args_size *alignment_pad;
5176 tree sizetree
5177 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5178 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5179 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5181 #ifdef REG_PARM_STACK_SPACE
5182 /* If we have found a stack parm before we reach the end of the
5183 area reserved for registers, skip that area. */
5184 if (! in_regs)
5186 int reg_parm_stack_space = 0;
5188 #ifdef MAYBE_REG_PARM_STACK_SPACE
5189 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5190 #else
5191 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5192 #endif
5193 if (reg_parm_stack_space > 0)
5195 if (initial_offset_ptr->var)
5197 initial_offset_ptr->var
5198 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5199 ssize_int (reg_parm_stack_space));
5200 initial_offset_ptr->constant = 0;
5202 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5203 initial_offset_ptr->constant = reg_parm_stack_space;
5206 #endif /* REG_PARM_STACK_SPACE */
5208 arg_size_ptr->var = 0;
5209 arg_size_ptr->constant = 0;
5210 alignment_pad->var = 0;
5211 alignment_pad->constant = 0;
5213 #ifdef ARGS_GROW_DOWNWARD
5214 if (initial_offset_ptr->var)
5216 offset_ptr->constant = 0;
5217 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5218 initial_offset_ptr->var);
5220 else
5222 offset_ptr->constant = -initial_offset_ptr->constant;
5223 offset_ptr->var = 0;
5225 if (where_pad != none
5226 && (!host_integerp (sizetree, 1)
5227 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5228 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5229 SUB_PARM_SIZE (*offset_ptr, sizetree);
5230 if (where_pad != downward)
5231 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5232 if (initial_offset_ptr->var)
5233 arg_size_ptr->var = size_binop (MINUS_EXPR,
5234 size_binop (MINUS_EXPR,
5235 ssize_int (0),
5236 initial_offset_ptr->var),
5237 offset_ptr->var);
5239 else
5240 arg_size_ptr->constant = (-initial_offset_ptr->constant
5241 - offset_ptr->constant);
5243 #else /* !ARGS_GROW_DOWNWARD */
5244 if (!in_regs
5245 #ifdef REG_PARM_STACK_SPACE
5246 || REG_PARM_STACK_SPACE (fndecl) > 0
5247 #endif
5248 )
5249 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5250 *offset_ptr = *initial_offset_ptr;
5252 #ifdef PUSH_ROUNDING
5253 if (passed_mode != BLKmode)
5254 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5255 #endif
5257 /* pad_below needs the pre-rounded size to know how much to pad below,
5258 so this must be done before rounding up. */
5259 if (where_pad == downward
5260 /* However, BLKmode args passed in regs have their padding done elsewhere.
5261 The stack slot must be able to hold the entire register. */
5262 && !(in_regs && passed_mode == BLKmode))
5263 pad_below (offset_ptr, passed_mode, sizetree);
5265 if (where_pad != none
5266 && (!host_integerp (sizetree, 1)
5267 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5268 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5270 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5271 #endif /* ARGS_GROW_DOWNWARD */
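/* Editorial aside, not part of function.c: the !ARGS_GROW_DOWNWARD
   arithmetic above, reduced to constant sizes on plain integers and
   compiled out.  The boundary is assumed to be a power of two given in
   bytes; all names are hypothetical.  */
#if 0
static void
toy_locate_and_pad (long size, long boundary_bytes,
		    long *initial_offset, long *offset, long *arg_size)
{
  /* Align the running offset in place (pad_to_arg_alignment).  */
  *initial_offset = (*initial_offset + boundary_bytes - 1)
		    & ~(boundary_bytes - 1);
  /* The parm starts at the aligned running offset.  */
  *offset = *initial_offset;
  /* Round the size up to the boundary (the round_up on sizetree);
     the caller adds *arg_size into its running total.  */
  *arg_size = (size + boundary_bytes - 1) & ~(boundary_bytes - 1);
}
#endif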
5274 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5275 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5277 static void
5278 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5279 struct args_size *offset_ptr;
5280 int boundary;
5281 struct args_size *alignment_pad;
5283 tree save_var = NULL_TREE;
5284 HOST_WIDE_INT save_constant = 0;
5286 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5288 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5290 save_var = offset_ptr->var;
5291 save_constant = offset_ptr->constant;
5294 alignment_pad->var = NULL_TREE;
5295 alignment_pad->constant = 0;
5297 if (boundary > BITS_PER_UNIT)
5299 if (offset_ptr->var)
5301 offset_ptr->var =
5302 #ifdef ARGS_GROW_DOWNWARD
5303 round_down
5304 #else
5305 round_up
5306 #endif
5307 (ARGS_SIZE_TREE (*offset_ptr),
5308 boundary / BITS_PER_UNIT);
5309 offset_ptr->constant = 0; /*?*/
5310 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5311 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5312 save_var);
5314 else
5316 offset_ptr->constant =
5317 #ifdef ARGS_GROW_DOWNWARD
5318 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5319 #else
5320 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5321 #endif
5322 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5323 alignment_pad->constant = offset_ptr->constant - save_constant;
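/* Editorial worked example with assumed values: for boundary == 64
   (boundary_in_bytes == 8) and a constant offset of 20,
   CEIL_ROUND (20, 8) == (20 + 7) & ~7 == 24, while in the
   ARGS_GROW_DOWNWARD case FLOOR_ROUND (-20, 8) == -24 on a
   two's-complement machine.  */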
5328 #ifndef ARGS_GROW_DOWNWARD
5329 static void
5330 pad_below (offset_ptr, passed_mode, sizetree)
5331 struct args_size *offset_ptr;
5332 enum machine_mode passed_mode;
5333 tree sizetree;
5335 if (passed_mode != BLKmode)
5337 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5338 offset_ptr->constant
5339 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5340 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5341 - GET_MODE_SIZE (passed_mode));
5343 else
5345 if (TREE_CODE (sizetree) != INTEGER_CST
5346 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5348 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5349 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5350 /* Add it in. */
5351 ADD_PARM_SIZE (*offset_ptr, s2);
5352 SUB_PARM_SIZE (*offset_ptr, sizetree);
5356 #endif
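/* Editorial worked example with assumed values: for pad_below with
   PARM_BOUNDARY == 32 and a 16-bit (2-byte) mode, the offset grows by
   ((16 + 31) / 32 * 32 / 8) - 2 == 4 - 2 == 2 bytes, so the value
   starts 2 bytes into its 4-byte slot.  */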
5358 /* Walk the tree of blocks describing the binding levels within a function
5359 and warn about uninitialized variables.
5360 This is done after calling flow_analysis and before global_alloc
5361 clobbers the pseudo-regs to hard regs. */
5363 void
5364 uninitialized_vars_warning (block)
5365 tree block;
5367 tree decl, sub;
5368 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5370 if (warn_uninitialized
5371 && TREE_CODE (decl) == VAR_DECL
5372 /* These warnings are unreliable for aggregates
5373 because assigning the fields one by one can fail to convince
5374 flow.c that the entire aggregate was initialized.
5375 Unions are troublesome because members may be shorter. */
5376 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5377 && DECL_RTL (decl) != 0
5378 && GET_CODE (DECL_RTL (decl)) == REG
5379 /* Global optimizations can make it difficult to determine if a
5380 particular variable has been initialized. However, a VAR_DECL
5381 with a nonzero DECL_INITIAL had an initializer, so do not
5382 claim it is potentially uninitialized.
5384 We do not care about the actual value in DECL_INITIAL, so we do
5385 not worry that it may be a dangling pointer. */
5386 && DECL_INITIAL (decl) == NULL_TREE
5387 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5388 warning_with_decl (decl,
5389 "`%s' might be used uninitialized in this function");
5390 if (extra_warnings
5391 && TREE_CODE (decl) == VAR_DECL
5392 && DECL_RTL (decl) != 0
5393 && GET_CODE (DECL_RTL (decl)) == REG
5394 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5395 warning_with_decl (decl,
5396 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5398 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5399 uninitialized_vars_warning (sub);
5402 /* Do the appropriate part of uninitialized_vars_warning
5403 but for arguments instead of local variables. */
5405 void
5406 setjmp_args_warning ()
5408 tree decl;
5409 for (decl = DECL_ARGUMENTS (current_function_decl);
5410 decl; decl = TREE_CHAIN (decl))
5411 if (DECL_RTL (decl) != 0
5412 && GET_CODE (DECL_RTL (decl)) == REG
5413 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5414 warning_with_decl (decl,
5415 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5418 /* If this function calls setjmp, put all vars into the stack
5419 unless they were declared `register'. */
5421 void
5422 setjmp_protect (block)
5423 tree block;
5425 tree decl, sub;
5426 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5427 if ((TREE_CODE (decl) == VAR_DECL
5428 || TREE_CODE (decl) == PARM_DECL)
5429 && DECL_RTL (decl) != 0
5430 && (GET_CODE (DECL_RTL (decl)) == REG
5431 || (GET_CODE (DECL_RTL (decl)) == MEM
5432 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5433 /* If this variable came from an inline function, it must be
5434 that its life doesn't overlap the setjmp. If there was a
5435 setjmp in the function, it would already be in memory. We
5436 must exclude such variables because their DECL_RTL might be
5437 set to strange things such as virtual_stack_vars_rtx. */
5438 && ! DECL_FROM_INLINE (decl)
5439 && (
5440 #ifdef NON_SAVING_SETJMP
5441 /* If longjmp doesn't restore the registers,
5442 don't put anything in them. */
5443 NON_SAVING_SETJMP
5444 ||
5445 #endif
5446 ! DECL_REGISTER (decl)))
5447 put_var_into_stack (decl);
5448 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5449 setjmp_protect (sub);
5452 /* Like the previous function, but for args instead of local variables. */
5454 void
5455 setjmp_protect_args ()
5457 tree decl;
5458 for (decl = DECL_ARGUMENTS (current_function_decl);
5459 decl; decl = TREE_CHAIN (decl))
5460 if ((TREE_CODE (decl) == VAR_DECL
5461 || TREE_CODE (decl) == PARM_DECL)
5462 && DECL_RTL (decl) != 0
5463 && (GET_CODE (DECL_RTL (decl)) == REG
5464 || (GET_CODE (DECL_RTL (decl)) == MEM
5465 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5466 && (
5467 /* If longjmp doesn't restore the registers,
5468 don't put anything in them. */
5469 #ifdef NON_SAVING_SETJMP
5470 NON_SAVING_SETJMP
5471 ||
5472 #endif
5473 ! DECL_REGISTER (decl)))
5474 put_var_into_stack (decl);
5477 /* Return the context-pointer register corresponding to DECL,
5478 or 0 if it does not need one. */
5480 rtx
5481 lookup_static_chain (decl)
5482 tree decl;
5484 tree context = decl_function_context (decl);
5485 tree link;
5487 if (context == 0
5488 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5489 return 0;
5491 /* We treat inline_function_decl as an alias for the current function
5492 because that is the inline function whose vars, types, etc.
5493 are being merged into the current function.
5494 See expand_inline_function. */
5495 if (context == current_function_decl || context == inline_function_decl)
5496 return virtual_stack_vars_rtx;
5498 for (link = context_display; link; link = TREE_CHAIN (link))
5499 if (TREE_PURPOSE (link) == context)
5500 return RTL_EXPR_RTL (TREE_VALUE (link));
5502 abort ();
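/* An illustrative example in GNU C (not compiler code): INNER can only
   reach X through a pointer into OUTER's frame, and the register
   returned above is the base for such references.

	int
	outer (void)
	{
	  int x = 42;
	  int inner (void) { return x; }
	  return inner ();
	}
*/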
5505 /* Convert a stack slot address ADDR for variable VAR
5506 (from a containing function)
5507 into an address valid in this function (using a static chain). */
5509 rtx
5510 fix_lexical_addr (addr, var)
5511 rtx addr;
5512 tree var;
5514 rtx basereg;
5515 HOST_WIDE_INT displacement;
5516 tree context = decl_function_context (var);
5517 struct function *fp;
5518 rtx base = 0;
5520 /* If this is the present function, we need not do anything. */
5521 if (context == current_function_decl || context == inline_function_decl)
5522 return addr;
5524 fp = find_function_data (context);
5526 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5527 addr = XEXP (XEXP (addr, 0), 0);
5529 /* Decode given address as base reg plus displacement. */
5530 if (GET_CODE (addr) == REG)
5531 basereg = addr, displacement = 0;
5532 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5533 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5534 else
5535 abort ();
5537 /* We accept vars reached via the containing function's
5538 incoming arg pointer and via its stack variables pointer. */
5539 if (basereg == fp->internal_arg_pointer)
5541 /* If reached via arg pointer, get the arg pointer value
5542 out of that function's stack frame.
5544 There are two cases: If a separate ap is needed, allocate a
5545 slot in the outer function for it and dereference it that way.
5546 This is correct even if the real ap is actually a pseudo.
5547 Otherwise, just adjust the offset from the frame pointer to
5548 compensate. */
5550 #ifdef NEED_SEPARATE_AP
5551 rtx addr;
5553 addr = get_arg_pointer_save_area (fp);
5554 addr = fix_lexical_addr (XEXP (addr, 0), var);
5555 addr = memory_address (Pmode, addr);
5557 base = gen_rtx_MEM (Pmode, addr);
5558 set_mem_alias_set (base, get_frame_alias_set ());
5559 base = copy_to_reg (base);
5560 #else
5561 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5562 base = lookup_static_chain (var);
5563 #endif
5566 else if (basereg == virtual_stack_vars_rtx)
5568 /* This is the same code as lookup_static_chain, duplicated here to
5569 avoid an extra call to decl_function_context. */
5570 tree link;
5572 for (link = context_display; link; link = TREE_CHAIN (link))
5573 if (TREE_PURPOSE (link) == context)
5575 base = RTL_EXPR_RTL (TREE_VALUE (link));
5576 break;
5580 if (base == 0)
5581 abort ();
5583 /* Use same offset, relative to appropriate static chain or argument
5584 pointer. */
5585 return plus_constant (base, displacement);
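/* A sketch with a made-up offset: if VAR lives at
   (plus virtual_stack_vars_rtx -8) in the containing function, the
   displacement -8 is kept and the base is replaced by the static
   chain value for that frame, giving an address of the form
   (plus chain_reg -8) that is valid in the current function.  */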
5588 /* Return the address of the trampoline for entering nested fn FUNCTION.
5589 If necessary, allocate a trampoline (in the stack frame)
5590 and emit rtl to initialize its contents (at entry to this function). */
5592 rtx
5593 trampoline_address (function)
5594 tree function;
5596 tree link;
5597 tree rtlexp;
5598 rtx tramp;
5599 struct function *fp;
5600 tree fn_context;
5602 /* Find an existing trampoline and return it. */
5603 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5604 if (TREE_PURPOSE (link) == function)
5605 return
5606 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5608 for (fp = outer_function_chain; fp; fp = fp->outer)
5609 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5610 if (TREE_PURPOSE (link) == function)
5612 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5613 function);
5614 return adjust_trampoline_addr (tramp);
5617 /* None exists; we must make one. */
5619 /* Find the `struct function' for the function containing FUNCTION. */
5620 fp = 0;
5621 fn_context = decl_function_context (function);
5622 if (fn_context != current_function_decl
5623 && fn_context != inline_function_decl)
5624 fp = find_function_data (fn_context);
5626 /* Allocate run-time space for this trampoline
5627 (usually in the defining function's stack frame). */
5628 #ifdef ALLOCATE_TRAMPOLINE
5629 tramp = ALLOCATE_TRAMPOLINE (fp);
5630 #else
5631 /* If rounding needed, allocate extra space
5632 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5633 #ifdef TRAMPOLINE_ALIGNMENT
5634 #define TRAMPOLINE_REAL_SIZE \
5635 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5636 #else
5637 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5638 #endif
5639 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5640 fp ? fp : cfun);
5641 #endif
5643 /* Record the trampoline for reuse and note it for later initialization
5644 by expand_function_end. */
5645 if (fp != 0)
5647 rtlexp = make_node (RTL_EXPR);
5648 RTL_EXPR_RTL (rtlexp) = tramp;
5649 fp->x_trampoline_list = tree_cons (function, rtlexp,
5650 fp->x_trampoline_list);
5652 else
5654 /* Make the RTL_EXPR node temporary, not momentary, so that the
5655 trampoline_list doesn't become garbage. */
5656 rtlexp = make_node (RTL_EXPR);
5658 RTL_EXPR_RTL (rtlexp) = tramp;
5659 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5662 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5663 return adjust_trampoline_addr (tramp);
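/* An illustrative example (user code, not compiler code): passing a
   nested function as an ordinary function pointer is what requires a
   trampoline, since a bare code address could not carry INNER's
   static chain.

	void apply (void (*fn) (void));

	void
	outer (int x)
	{
	  void inner (void) { x++; }
	  apply (inner);
	}

   Here `inner' evaluates to the address computed by
   trampoline_address.  */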
5666 /* Given a trampoline address,
5667 round it up to a multiple of TRAMPOLINE_ALIGNMENT.  */
5669 static rtx
5670 round_trampoline_addr (tramp)
5671 rtx tramp;
5673 #ifdef TRAMPOLINE_ALIGNMENT
5674 /* Round address up to desired boundary. */
5675 rtx temp = gen_reg_rtx (Pmode);
5676 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5677 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5679 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5680 temp, 0, OPTAB_LIB_WIDEN);
5681 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5682 temp, 0, OPTAB_LIB_WIDEN);
5683 #endif
5684 return tramp;
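/* A worked example of the computation above: if TRAMPOLINE_ALIGNMENT
   is 64 (bits), ADDEND is 7 and MASK is -8, so a trampoline at 0x1003
   is moved to (0x1003 + 7) & -8 = 0x1008, the next 8-byte boundary.  */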
5687 /* Given a trampoline address, round it and then apply any
5688 platform-specific adjustments so that the result can be used for a
5689 function call.  */
5691 static rtx
5692 adjust_trampoline_addr (tramp)
5693 rtx tramp;
5695 tramp = round_trampoline_addr (tramp);
5696 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5697 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5698 #endif
5699 return tramp;
5702 /* Put all this function's BLOCK nodes, including those that are
5703 chained onto the first block, into a vector in depth-first order,
5704 and store in each NOTE for the beginning or end of a block the
5705 BLOCK that the note refers to.  The blocks and notes are taken
5706 from DECL_INITIAL (current_function_decl) and the current
5707 function's insn chain respectively. */
5709 void
5710 identify_blocks ()
5712 int n_blocks;
5713 tree *block_vector, *last_block_vector;
5714 tree *block_stack;
5715 tree block = DECL_INITIAL (current_function_decl);
5717 if (block == 0)
5718 return;
5720 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5721 depth-first order. */
5722 block_vector = get_block_vector (block, &n_blocks);
5723 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5725 last_block_vector = identify_blocks_1 (get_insns (),
5726 block_vector + 1,
5727 block_vector + n_blocks,
5728 block_stack);
5730 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5731 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5732 if (0 && last_block_vector != block_vector + n_blocks)
5733 abort ();
5735 free (block_vector);
5736 free (block_stack);
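/* For illustration: for a function whose body nests scopes like

	{ int a;  { int b; }  { int c; } }

   get_block_vector returns the outermost block followed by the two
   inner blocks in depth-first order.  The walk above starts at
   block_vector + 1, so each NOTE_INSN_BLOCK_BEG/END pair is matched
   with one of the inner blocks, in that same order, via NOTE_BLOCK.  */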
5739 /* Subroutine of identify_blocks. Do the block substitution on the
5740 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5742 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5743 BLOCK_VECTOR is incremented for each block seen. */
5745 static tree *
5746 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5747 rtx insns;
5748 tree *block_vector;
5749 tree *end_block_vector;
5750 tree *orig_block_stack;
5752 rtx insn;
5753 tree *block_stack = orig_block_stack;
5755 for (insn = insns; insn; insn = NEXT_INSN (insn))
5757 if (GET_CODE (insn) == NOTE)
5759 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5761 tree b;
5763 /* If there are more block notes than BLOCKs, something
5764 is badly wrong. */
5765 if (block_vector == end_block_vector)
5766 abort ();
5768 b = *block_vector++;
5769 NOTE_BLOCK (insn) = b;
5770 *block_stack++ = b;
5772 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5774 /* If there are more NOTE_INSN_BLOCK_ENDs than
5775 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5776 if (block_stack == orig_block_stack)
5777 abort ();
5779 NOTE_BLOCK (insn) = *--block_stack;
5782 else if (GET_CODE (insn) == CALL_INSN
5783 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5785 rtx cp = PATTERN (insn);
5787 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5788 end_block_vector, block_stack);
5789 if (XEXP (cp, 1))
5790 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5791 end_block_vector, block_stack);
5792 if (XEXP (cp, 2))
5793 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5794 end_block_vector, block_stack);
5798 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5799 something is badly wrong. */
5800 if (block_stack != orig_block_stack)
5801 abort ();
5803 return block_vector;
5806 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5807 and create duplicate blocks. */
5808 /* ??? Need an option to either create block fragments or to create
5809 abstract origin duplicates of a source block. It really depends
5810 on what optimization has been performed. */
5812 void
5813 reorder_blocks ()
5815 tree block = DECL_INITIAL (current_function_decl);
5816 varray_type block_stack;
5818 if (block == NULL_TREE)
5819 return;
5821 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5823 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5824 reorder_blocks_0 (block);
5826 /* Prune the old trees away, so that they don't get in the way. */
5827 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5828 BLOCK_CHAIN (block) = NULL_TREE;
5830 /* Recreate the block tree from the note nesting. */
5831 reorder_blocks_1 (get_insns (), block, &block_stack);
5832 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5834 /* Remove deleted blocks from the block fragment chains. */
5835 reorder_fix_fragments (block);
5837 VARRAY_FREE (block_stack);
5840 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5842 static void
5843 reorder_blocks_0 (block)
5844 tree block;
5846 while (block)
5848 TREE_ASM_WRITTEN (block) = 0;
5849 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5850 block = BLOCK_CHAIN (block);
5854 static void
5855 reorder_blocks_1 (insns, current_block, p_block_stack)
5856 rtx insns;
5857 tree current_block;
5858 varray_type *p_block_stack;
5860 rtx insn;
5862 for (insn = insns; insn; insn = NEXT_INSN (insn))
5864 if (GET_CODE (insn) == NOTE)
5866 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5868 tree block = NOTE_BLOCK (insn);
5870 /* If we have seen this block before, that means it now
5871 spans multiple address regions. Create a new fragment. */
5872 if (TREE_ASM_WRITTEN (block))
5874 tree new_block = copy_node (block);
5875 tree origin;
5877 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5878 ? BLOCK_FRAGMENT_ORIGIN (block)
5879 : block);
5880 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5881 BLOCK_FRAGMENT_CHAIN (new_block)
5882 = BLOCK_FRAGMENT_CHAIN (origin);
5883 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5885 NOTE_BLOCK (insn) = new_block;
5886 block = new_block;
5889 BLOCK_SUBBLOCKS (block) = 0;
5890 TREE_ASM_WRITTEN (block) = 1;
5891 BLOCK_SUPERCONTEXT (block) = current_block;
5892 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5893 BLOCK_SUBBLOCKS (current_block) = block;
5894 current_block = block;
5895 VARRAY_PUSH_TREE (*p_block_stack, block);
5897 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5899 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5900 VARRAY_POP (*p_block_stack);
5901 BLOCK_SUBBLOCKS (current_block)
5902 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5903 current_block = BLOCK_SUPERCONTEXT (current_block);
5906 else if (GET_CODE (insn) == CALL_INSN
5907 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5909 rtx cp = PATTERN (insn);
5910 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5911 if (XEXP (cp, 1))
5912 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5913 if (XEXP (cp, 2))
5914 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
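/* A sketch of fragment creation above: if basic block reordering
   splits a block's insns into two runs, the note stream can look like

	BLOCK_BEG B ... BLOCK_END B ... BLOCK_BEG B ... BLOCK_END B

   The second BLOCK_BEG finds TREE_ASM_WRITTEN (B) already set, so a
   copy B' is made with BLOCK_FRAGMENT_ORIGIN (B') == B and chained
   onto B's fragment list; the second pair of notes then refers to B'.  */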
5919 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
5920 appears in the block tree, select one of the fragments to become
5921 the new origin block. */
5923 static void
5924 reorder_fix_fragments (block)
5925 tree block;
5927 while (block)
5929 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
5930 tree new_origin = NULL_TREE;
5932 if (dup_origin)
5934 if (! TREE_ASM_WRITTEN (dup_origin))
5936 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
5938 /* Find the first of the remaining fragments. There must
5939 be at least one -- the current block. */
5940 while (! TREE_ASM_WRITTEN (new_origin))
5941 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
5942 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
5945 else if (! dup_origin)
5946 new_origin = block;
5948 /* Re-root the rest of the fragments to the new origin. In the
5949 case that DUP_ORIGIN was null, that means BLOCK was the origin
5950 of a chain of fragments and we want to remove those fragments
5951 that didn't make it to the output. */
5952 if (new_origin)
5954 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
5955 tree chain = *pp;
5957 while (chain)
5959 if (TREE_ASM_WRITTEN (chain))
5961 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
5962 *pp = chain;
5963 pp = &BLOCK_FRAGMENT_CHAIN (chain);
5965 chain = BLOCK_FRAGMENT_CHAIN (chain);
5967 *pp = NULL_TREE;
5970 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
5971 block = BLOCK_CHAIN (block);
5975 /* Reverse the order of elements in the chain T of blocks,
5976 and return the new head of the chain (old last element). */
5978 static tree
5979 blocks_nreverse (t)
5980 tree t;
5982 tree prev = 0, decl, next;
5983 for (decl = t; decl; decl = next)
5985 next = BLOCK_CHAIN (decl);
5986 BLOCK_CHAIN (decl) = prev;
5987 prev = decl;
5989 return prev;
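/* For example, a chain A -> B -> C comes back as C -> B -> A; each
   BLOCK_CHAIN pointer is flipped in place, so no nodes are copied.  */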
5992 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5993 non-NULL, list them all into VECTOR, in a depth-first preorder
5994 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5995 blocks. */
5997 static int
5998 all_blocks (block, vector)
5999 tree block;
6000 tree *vector;
6002 int n_blocks = 0;
6004 while (block)
6006 TREE_ASM_WRITTEN (block) = 0;
6008 /* Record this block. */
6009 if (vector)
6010 vector[n_blocks] = block;
6012 ++n_blocks;
6014 /* Record the subblocks, and their subblocks... */
6015 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6016 vector ? vector + n_blocks : 0);
6017 block = BLOCK_CHAIN (block);
6020 return n_blocks;
6023 /* Return a vector containing all the blocks rooted at BLOCK. The
6024 number of elements in the vector is stored in N_BLOCKS_P. The
6025 vector is dynamically allocated; it is the caller's responsibility
6026 to call `free' on the pointer returned. */
6028 static tree *
6029 get_block_vector (block, n_blocks_p)
6030 tree block;
6031 int *n_blocks_p;
6033 tree *block_vector;
6035 *n_blocks_p = all_blocks (block, NULL);
6036 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6037 all_blocks (block, block_vector);
6039 return block_vector;
6042 static int next_block_index = 2;
6044 /* Set BLOCK_NUMBER for all the blocks in FN. */
6046 void
6047 number_blocks (fn)
6048 tree fn;
6050 int i;
6051 int n_blocks;
6052 tree *block_vector;
6054 /* For SDB and XCOFF debugging output, we start numbering the blocks
6055 from 1 within each function, rather than keeping a running
6056 count. */
6057 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6058 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6059 next_block_index = 1;
6060 #endif
6062 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6064 /* The top-level BLOCK isn't numbered at all. */
6065 for (i = 1; i < n_blocks; ++i)
6066 /* We number the blocks from two. */
6067 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6069 free (block_vector);
6071 return;
6074 /* Allocate a function structure and reset its contents to the defaults. */
6076 static void
6077 prepare_function_start ()
6079 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6081 init_stmt_for_function ();
6082 init_eh_for_function ();
6084 cse_not_expected = ! optimize;
6086 /* Caller save not needed yet. */
6087 caller_save_needed = 0;
6089 /* No stack slots have been made yet. */
6090 stack_slot_list = 0;
6092 current_function_has_nonlocal_label = 0;
6093 current_function_has_nonlocal_goto = 0;
6095 /* There is no stack slot for handling nonlocal gotos. */
6096 nonlocal_goto_handler_slots = 0;
6097 nonlocal_goto_stack_level = 0;
6099 /* No labels have been declared for nonlocal use. */
6100 nonlocal_labels = 0;
6101 nonlocal_goto_handler_labels = 0;
6103 /* No function calls so far in this function. */
6104 function_call_count = 0;
6106 /* No parm regs have been allocated.
6107 (This is important for output_inline_function.) */
6108 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6110 /* Initialize the RTL mechanism. */
6111 init_emit ();
6113 /* Initialize the queue of pending postincrement and postdecrements,
6114 and some other info in expr.c. */
6115 init_expr ();
6117 /* We haven't done register allocation yet. */
6118 reg_renumber = 0;
6120 init_varasm_status (cfun);
6122 /* Clear out data used for inlining. */
6123 cfun->inlinable = 0;
6124 cfun->original_decl_initial = 0;
6125 cfun->original_arg_vector = 0;
6127 cfun->stack_alignment_needed = STACK_BOUNDARY;
6128 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6130 /* Set if a call to setjmp is seen. */
6131 current_function_calls_setjmp = 0;
6133 /* Set if a call to longjmp is seen. */
6134 current_function_calls_longjmp = 0;
6136 current_function_calls_alloca = 0;
6137 current_function_contains_functions = 0;
6138 current_function_is_leaf = 0;
6139 current_function_nothrow = 0;
6140 current_function_sp_is_unchanging = 0;
6141 current_function_uses_only_leaf_regs = 0;
6142 current_function_has_computed_jump = 0;
6143 current_function_is_thunk = 0;
6145 current_function_returns_pcc_struct = 0;
6146 current_function_returns_struct = 0;
6147 current_function_epilogue_delay_list = 0;
6148 current_function_uses_const_pool = 0;
6149 current_function_uses_pic_offset_table = 0;
6150 current_function_cannot_inline = 0;
6152 /* We have not yet needed to make a label to jump to for tail-recursion. */
6153 tail_recursion_label = 0;
6155 /* We haven't had a need to make a save area for ap yet. */
6156 arg_pointer_save_area = 0;
6158 /* No stack slots allocated yet. */
6159 frame_offset = 0;
6161 /* No SAVE_EXPRs in this function yet. */
6162 save_expr_regs = 0;
6164 /* No RTL_EXPRs in this function yet. */
6165 rtl_expr_chain = 0;
6167 /* Set up to allocate temporaries. */
6168 init_temp_slots ();
6170 /* Indicate that we need to distinguish between the return value of the
6171 present function and the return value of a function being called. */
6172 rtx_equal_function_value_matters = 1;
6174 /* Indicate that we have not instantiated virtual registers yet. */
6175 virtuals_instantiated = 0;
6177 /* Indicate that we want CONCATs now. */
6178 generating_concat_p = 1;
6180 /* Indicate we have no need of a frame pointer yet. */
6181 frame_pointer_needed = 0;
6183 /* By default assume not varargs or stdarg. */
6184 current_function_varargs = 0;
6185 current_function_stdarg = 0;
6187 /* We haven't made any trampolines for this function yet. */
6188 trampoline_list = 0;
6190 init_pending_stack_adjust ();
6191 inhibit_defer_pop = 0;
6193 current_function_outgoing_args_size = 0;
6195 if (init_lang_status)
6196 (*init_lang_status) (cfun);
6197 if (init_machine_status)
6198 (*init_machine_status) (cfun);
6201 /* Initialize the rtl expansion mechanism so that we can do simple things
6202 like generate sequences. This is used to provide a context during global
6203 initialization of some passes. */
6204 void
6205 init_dummy_function_start ()
6207 prepare_function_start ();
6210 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6211 and initialize static variables for generating RTL for the statements
6212 of the function. */
6214 void
6215 init_function_start (subr, filename, line)
6216 tree subr;
6217 const char *filename;
6218 int line;
6220 prepare_function_start ();
6222 current_function_name = (*decl_printable_name) (subr, 2);
6223 cfun->decl = subr;
6225 /* Nonzero if this is a nested function that uses a static chain. */
6227 current_function_needs_context
6228 = (decl_function_context (current_function_decl) != 0
6229 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6231 /* Within function body, compute a type's size as soon as it is laid out. */
6232 immediate_size_expand++;
6234 /* Prevent ever trying to delete the first instruction of a function.
6235 Also tell final how to output a linenum before the function prologue.
6236 Note linenums could be missing, e.g. when compiling a Java .class file. */
6237 if (line > 0)
6238 emit_line_note (filename, line);
6240 /* Make sure first insn is a note even if we don't want linenums.
6241 This makes sure the first insn will never be deleted.
6242 Also, final expects a note to appear there. */
6243 emit_note (NULL, NOTE_INSN_DELETED);
6245 /* Set flags used by final.c. */
6246 if (aggregate_value_p (DECL_RESULT (subr)))
6248 #ifdef PCC_STATIC_STRUCT_RETURN
6249 current_function_returns_pcc_struct = 1;
6250 #endif
6251 current_function_returns_struct = 1;
6254 /* Warn if this value is an aggregate type,
6255 regardless of which calling convention we are using for it. */
6256 if (warn_aggregate_return
6257 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6258 warning ("function returns an aggregate");
6260 current_function_returns_pointer
6261 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6264 /* Make sure all values used by the optimization passes have sane
6265 defaults. */
6266 void
6267 init_function_for_compilation ()
6269 reg_renumber = 0;
6271 /* No prologue/epilogue insns yet. */
6272 VARRAY_GROW (prologue, 0);
6273 VARRAY_GROW (epilogue, 0);
6274 VARRAY_GROW (sibcall_epilogue, 0);
6277 /* Indicate that the current function uses extra args
6278 not explicitly mentioned in the argument list in any fashion. */
6280 void
6281 mark_varargs ()
6283 current_function_varargs = 1;
6286 /* Expand a call to __main at the beginning of a possible main function. */
6288 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6289 #undef HAS_INIT_SECTION
6290 #define HAS_INIT_SECTION
6291 #endif
6293 void
6294 expand_main_function ()
6296 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6297 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6299 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6300 rtx tmp;
6302 /* Forcibly align the stack. */
6303 #ifdef STACK_GROWS_DOWNWARD
6304 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
6305 stack_pointer_rtx, 1, OPTAB_WIDEN);
6306 #else
6307 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6308 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6309 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6310 stack_pointer_rtx, 1, OPTAB_WIDEN);
6311 #endif
6312 if (tmp != stack_pointer_rtx)
6313 emit_move_insn (stack_pointer_rtx, tmp);
6315 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6316 tmp = force_reg (Pmode, const0_rtx);
6317 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6319 #endif
6321 #ifndef HAS_INIT_SECTION
6322 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6323 VOIDmode, 0);
6324 #endif
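/* A worked example of the alignment code above: with a preferred
   boundary of 128 bits, ALIGN is 16; on a downward-growing stack an
   incoming SP of 0x7fff1234 is rounded with 0x7fff1234 & -16
   = 0x7fff1230, moving SP down by 4 bytes so that main's frame starts
   on a 16-byte boundary.  */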
6327 extern struct obstack permanent_obstack;
6329 /* The PENDING_SIZES represent the sizes of variable-sized types.
6330 Create RTL for the various sizes now (using temporary variables),
6331 so that we can refer to the sizes from the RTL we are generating
6332 for the current function. The PENDING_SIZES are a TREE_LIST. The
6333 TREE_VALUE of each node is a SAVE_EXPR. */
6335 void
6336 expand_pending_sizes (pending_sizes)
6337 tree pending_sizes;
6339 tree tem;
6341 /* Evaluate now the sizes of any types declared among the arguments. */
6342 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6344 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6345 EXPAND_MEMORY_USE_BAD);
6346 /* Flush the queue in case this parameter declaration has
6347 side-effects. */
6348 emit_queue ();
6352 /* Start the RTL for a new function, and set variables used for
6353 emitting RTL.
6354 SUBR is the FUNCTION_DECL node.
6355 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6356 the function's parameters, which must be run at any return statement. */
6358 void
6359 expand_function_start (subr, parms_have_cleanups)
6360 tree subr;
6361 int parms_have_cleanups;
6363 tree tem;
6364 rtx last_ptr = NULL_RTX;
6366 /* Make sure volatile mem refs aren't considered
6367 valid operands of arithmetic insns. */
6368 init_recog_no_volatile ();
6370 /* Set this before generating any memory accesses. */
6371 current_function_check_memory_usage
6372 = (flag_check_memory_usage
6373 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6375 current_function_instrument_entry_exit
6376 = (flag_instrument_function_entry_exit
6377 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6379 current_function_limit_stack
6380 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6382 /* If function gets a static chain arg, store it in the stack frame.
6383 Do this first, so it gets the first stack slot offset. */
6384 if (current_function_needs_context)
6386 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6388 /* Delay copying static chain if it is not a register to avoid
6389 conflicts with regs used for parameters. */
6390 if (! SMALL_REGISTER_CLASSES
6391 || GET_CODE (static_chain_incoming_rtx) == REG)
6392 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6395 /* If the parameters of this function need cleaning up, get a label
6396 for the beginning of the code which executes those cleanups. This must
6397 be done before doing anything with return_label. */
6398 if (parms_have_cleanups)
6399 cleanup_label = gen_label_rtx ();
6400 else
6401 cleanup_label = 0;
6403 /* Make the label for return statements to jump to. Do not special
6404 case machines with special return instructions -- they will be
6405 handled later during jump, ifcvt, or epilogue creation. */
6406 return_label = gen_label_rtx ();
6408 /* Initialize rtx used to return the value. */
6409 /* Do this before assign_parms so that we copy the struct value address
6410 before any library calls that assign parms might generate. */
6412 /* Decide whether to return the value in memory or in a register. */
6413 if (aggregate_value_p (DECL_RESULT (subr)))
6415 /* Returning something that won't go in a register. */
6416 rtx value_address = 0;
6418 #ifdef PCC_STATIC_STRUCT_RETURN
6419 if (current_function_returns_pcc_struct)
6421 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6422 value_address = assemble_static_space (size);
6424 else
6425 #endif
6427 /* Expect to be passed the address of a place to store the value.
6428 If it is passed as an argument, assign_parms will take care of
6429 it. */
6430 if (struct_value_incoming_rtx)
6432 value_address = gen_reg_rtx (Pmode);
6433 emit_move_insn (value_address, struct_value_incoming_rtx);
6436 if (value_address)
6438 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6439 set_mem_attributes (x, DECL_RESULT (subr), 1);
6440 SET_DECL_RTL (DECL_RESULT (subr), x);
6443 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6444 /* If return mode is void, this decl rtl should not be used. */
6445 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6446 else
6448 /* Compute the return values into a pseudo reg, which we will copy
6449 into the true return register after the cleanups are done. */
6451 /* In order to figure out what mode to use for the pseudo, we
6452 figure out what the mode of the eventual return register will
6453 actually be, and use that. */
6454 rtx hard_reg
6455 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6456 subr, 1);
6458 /* Structures that are returned in registers are not aggregate_value_p,
6459 so we may see a PARALLEL. Don't play pseudo games with this. */
6460 if (! REG_P (hard_reg))
6461 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6462 else
6464 /* Create the pseudo. */
6465 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6467 /* Needed because we may need to move this to memory
6468 in case it's a named return value whose address is taken. */
6469 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6473 /* Initialize rtx for parameters and local variables.
6474 In some cases this requires emitting insns. */
6476 assign_parms (subr);
6478 /* Copy the static chain now if it wasn't a register. The delay is to
6479 avoid conflicts with the parameter passing registers. */
6481 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6482 if (GET_CODE (static_chain_incoming_rtx) != REG)
6483 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6485 /* The following was moved from init_function_start.
6486 The move is supposed to make sdb output more accurate. */
6487 /* Indicate the beginning of the function body,
6488 as opposed to parm setup. */
6489 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6491 if (GET_CODE (get_last_insn ()) != NOTE)
6492 emit_note (NULL, NOTE_INSN_DELETED);
6493 parm_birth_insn = get_last_insn ();
6495 context_display = 0;
6496 if (current_function_needs_context)
6498 /* Fetch static chain values for containing functions. */
6499 tem = decl_function_context (current_function_decl);
6500 /* Copy the static chain pointer into a pseudo. If we have
6501 small register classes, copy the value from memory if
6502 static_chain_incoming_rtx is a REG. */
6503 if (tem)
6505 /* If the static chain originally came in a register, put it back
6506 there, then move it out in the next insn. The reason for
6507 this peculiar code is to satisfy function integration. */
6508 if (SMALL_REGISTER_CLASSES
6509 && GET_CODE (static_chain_incoming_rtx) == REG)
6510 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6511 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6514 while (tem)
6516 tree rtlexp = make_node (RTL_EXPR);
6518 RTL_EXPR_RTL (rtlexp) = last_ptr;
6519 context_display = tree_cons (tem, rtlexp, context_display);
6520 tem = decl_function_context (tem);
6521 if (tem == 0)
6522 break;
6523 /* Chain thru stack frames, assuming pointer to next lexical frame
6524 is found at the place we always store it. */
6525 #ifdef FRAME_GROWS_DOWNWARD
6526 last_ptr = plus_constant (last_ptr,
6527 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6528 #endif
6529 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6530 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6531 last_ptr = copy_to_reg (last_ptr);
6533 /* If we are not optimizing, ensure that we know that this
6534 piece of context is live over the entire function. */
6535 if (! optimize)
6536 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6537 save_expr_regs);
6541 if (current_function_instrument_entry_exit)
6543 rtx fun = DECL_RTL (current_function_decl);
6544 if (GET_CODE (fun) == MEM)
6545 fun = XEXP (fun, 0);
6546 else
6547 abort ();
6548 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6549 2, fun, Pmode,
6550 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6551 0,
6552 hard_frame_pointer_rtx),
6553 Pmode);
6556 #ifdef PROFILE_HOOK
6557 if (profile_flag)
6558 PROFILE_HOOK (profile_label_no);
6559 #endif
6561 /* After the display initializations is where the tail-recursion label
6562 should go, if we end up needing one. Ensure we have a NOTE here
6563 since some things (like trampolines) get placed before this. */
6564 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6566 /* Evaluate now the sizes of any types declared among the arguments. */
6567 expand_pending_sizes (nreverse (get_pending_sizes ()));
6569 /* Make sure there is a line number after the function entry setup code. */
6570 force_next_line_note ();
6573 /* Undo the effects of init_dummy_function_start. */
6574 void
6575 expand_dummy_function_end ()
6577 /* End any sequences that failed to be closed due to syntax errors. */
6578 while (in_sequence_p ())
6579 end_sequence ();
6581 /* Outside function body, can't compute type's actual size
6582 until next function's body starts. */
6584 free_after_parsing (cfun);
6585 free_after_compilation (cfun);
6586 cfun = 0;
6589 /* Call DOIT for each hard register used as a return value from
6590 the current function. */
6592 void
6593 diddle_return_value (doit, arg)
6594 void (*doit) PARAMS ((rtx, void *));
6595 void *arg;
6597 rtx outgoing = current_function_return_rtx;
6599 if (! outgoing)
6600 return;
6602 if (GET_CODE (outgoing) == REG)
6603 (*doit) (outgoing, arg);
6604 else if (GET_CODE (outgoing) == PARALLEL)
6606 int i;
6608 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6610 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6612 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6613 (*doit) (x, arg);
6618 static void
6619 do_clobber_return_reg (reg, arg)
6620 rtx reg;
6621 void *arg ATTRIBUTE_UNUSED;
6623 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6626 void
6627 clobber_return_register ()
6629 diddle_return_value (do_clobber_return_reg, NULL);
6631 /* In case we do use pseudo to return value, clobber it too. */
6632 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6634 tree decl_result = DECL_RESULT (current_function_decl);
6635 rtx decl_rtl = DECL_RTL (decl_result);
6636 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6638 do_clobber_return_reg (decl_rtl, NULL);
6643 static void
6644 do_use_return_reg (reg, arg)
6645 rtx reg;
6646 void *arg ATTRIBUTE_UNUSED;
6648 emit_insn (gen_rtx_USE (VOIDmode, reg));
6651 void
6652 use_return_register ()
6654 diddle_return_value (do_use_return_reg, NULL);
6657 /* Generate RTL for the end of the current function.
6658 FILENAME and LINE are the current position in the source file.
6660 It is up to language-specific callers to do cleanups for parameters--
6661 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6663 void
6664 expand_function_end (filename, line, end_bindings)
6665 const char *filename;
6666 int line;
6667 int end_bindings;
6669 tree link;
6670 rtx clobber_after;
6672 #ifdef TRAMPOLINE_TEMPLATE
6673 static rtx initial_trampoline;
6674 #endif
6676 finish_expr_for_function ();
6678 /* If arg_pointer_save_area was referenced only from a nested
6679 function, we will not have initialized it yet. Do that now. */
6680 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6681 get_arg_pointer_save_area (cfun);
6683 #ifdef NON_SAVING_SETJMP
6684 /* Don't put any variables in registers if we call setjmp
6685 on a machine that fails to restore the registers. */
6686 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6688 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6689 setjmp_protect (DECL_INITIAL (current_function_decl));
6691 setjmp_protect_args ();
6693 #endif
6695 /* Initialize any trampolines required by this function. */
6696 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6698 tree function = TREE_PURPOSE (link);
6699 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6700 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6701 #ifdef TRAMPOLINE_TEMPLATE
6702 rtx blktramp;
6703 #endif
6704 rtx seq;
6706 #ifdef TRAMPOLINE_TEMPLATE
6707 /* First make sure this compilation has a template for
6708 initializing trampolines. */
6709 if (initial_trampoline == 0)
6711 initial_trampoline
6712 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6713 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6715 ggc_add_rtx_root (&initial_trampoline, 1);
6717 #endif
6719 /* Generate insns to initialize the trampoline. */
6720 start_sequence ();
6721 tramp = round_trampoline_addr (XEXP (tramp, 0));
6722 #ifdef TRAMPOLINE_TEMPLATE
6723 blktramp = replace_equiv_address (initial_trampoline, tramp);
6724 emit_block_move (blktramp, initial_trampoline,
6725 GEN_INT (TRAMPOLINE_SIZE));
6726 #endif
6727 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6728 seq = get_insns ();
6729 end_sequence ();
6731 /* Put those insns at entry to the containing function (this one). */
6732 emit_insns_before (seq, tail_recursion_reentry);
6735 /* If we are doing stack checking and this function makes calls,
6736 do a stack probe at the start of the function to ensure we have enough
6737 space for another stack frame. */
6738 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6740 rtx insn, seq;
6742 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6743 if (GET_CODE (insn) == CALL_INSN)
6745 start_sequence ();
6746 probe_stack_range (STACK_CHECK_PROTECT,
6747 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6748 seq = get_insns ();
6749 end_sequence ();
6750 emit_insns_before (seq, tail_recursion_reentry);
6751 break;
6755 /* Warn about unused parms if extra warnings were specified. */
6756 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6757 warning. WARN_UNUSED_PARAMETER is negative when set by
6758 -Wunused. */
6759 if (warn_unused_parameter > 0
6760 || (warn_unused_parameter < 0 && extra_warnings))
6762 tree decl;
6764 for (decl = DECL_ARGUMENTS (current_function_decl);
6765 decl; decl = TREE_CHAIN (decl))
6766 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6767 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6768 warning_with_decl (decl, "unused parameter `%s'");
6771 /* Delete handlers for nonlocal gotos if nothing uses them. */
6772 if (nonlocal_goto_handler_slots != 0
6773 && ! current_function_has_nonlocal_label)
6774 delete_handlers ();
6776 /* End any sequences that failed to be closed due to syntax errors. */
6777 while (in_sequence_p ())
6778 end_sequence ();
6780 /* Outside function body, can't compute type's actual size
6781 until next function's body starts. */
6782 immediate_size_expand--;
6784 clear_pending_stack_adjust ();
6785 do_pending_stack_adjust ();
6787 /* Mark the end of the function body.
6788 If control reaches this insn, the function can drop through
6789 without returning a value. */
6790 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6792 /* Must mark the last line number note in the function, so that the test
6793 coverage code can avoid counting the last line twice. This just tells
6794 the code to ignore the immediately following line note, since there
6795 already exists a copy of this note somewhere above. This line number
6796 note is still needed for debugging though, so we can't delete it. */
6797 if (flag_test_coverage)
6798 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6800 /* Output a linenumber for the end of the function.
6801 SDB depends on this. */
6802 emit_line_note_force (filename, line);
6804 /* Before the return label (if any), clobber the return
6805 registers so that they are not propagated live to the rest of
6806 the function. This can only happen with functions that drop
6807 through; if there had been a return statement, there would
6808 have either been a return rtx, or a jump to the return label.
6810 We delay actual code generation after the current_function_value_rtx
6811 is computed. */
6812 clobber_after = get_last_insn ();
6814 /* Output the label for the actual return from the function,
6815 if one is expected. This happens either because a function epilogue
6816 is used instead of a return instruction, or because a return was done
6817 with a goto in order to run local cleanups, or because of pcc-style
6818 structure returning. */
6819 if (return_label)
6820 emit_label (return_label);
6822 /* C++ uses this. */
6823 if (end_bindings)
6824 expand_end_bindings (0, 0, 0);
6826 if (current_function_instrument_entry_exit)
6828 rtx fun = DECL_RTL (current_function_decl);
6829 if (GET_CODE (fun) == MEM)
6830 fun = XEXP (fun, 0);
6831 else
6832 abort ();
6833 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6834 2, fun, Pmode,
6835 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6836 0,
6837 hard_frame_pointer_rtx),
6838 Pmode);
6841 /* Let except.c know where it should emit the call to unregister
6842 the function context for sjlj exceptions. */
6843 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6844 sjlj_emit_function_exit_after (get_last_insn ());
6846 /* If we had calls to alloca, and this machine needs
6847 an accurate stack pointer to exit the function,
6848 insert some code to save and restore the stack pointer. */
6849 #ifdef EXIT_IGNORE_STACK
6850 if (! EXIT_IGNORE_STACK)
6851 #endif
6852 if (current_function_calls_alloca)
6854 rtx tem = 0;
6856 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6857 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6860 /* If scalar return value was computed in a pseudo-reg, or was a named
6861 return value that got dumped to the stack, copy that to the hard
6862 return register. */
6863 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6865 tree decl_result = DECL_RESULT (current_function_decl);
6866 rtx decl_rtl = DECL_RTL (decl_result);
6868 if (REG_P (decl_rtl)
6869 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6870 : DECL_REGISTER (decl_result))
6872 rtx real_decl_rtl;
6874 #ifdef FUNCTION_OUTGOING_VALUE
6875 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
6876 current_function_decl);
6877 #else
6878 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
6879 current_function_decl);
6880 #endif
6881 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
6883 /* If this is a BLKmode structure being returned in registers,
6884 then use the mode computed in expand_return. Note that if
6885 decl_rtl is memory, then its mode may have been changed,
6886 but that current_function_return_rtx has not. */
6887 if (GET_MODE (real_decl_rtl) == BLKmode)
6888 PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx));
6890 /* If a named return value dumped decl_result to memory, then
6891 we may need to re-do the PROMOTE_MODE signed/unsigned
6892 extension. */
6893 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6895 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6897 #ifdef PROMOTE_FUNCTION_RETURN
6898 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6899 &unsignedp, 1);
6900 #endif
6902 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6904 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6905 emit_group_load (real_decl_rtl, decl_rtl,
6906 int_size_in_bytes (TREE_TYPE (decl_result)));
6907 else
6908 emit_move_insn (real_decl_rtl, decl_rtl);
6910 /* The delay slot scheduler assumes that current_function_return_rtx
6911 holds the hard register containing the return value, not a
6912 temporary pseudo. */
6913 current_function_return_rtx = real_decl_rtl;
6917 /* If returning a structure, arrange to return the address of the value
6918 in a place where debuggers expect to find it.
6920 If returning a structure PCC style,
6921 the caller also depends on this value.
6922 And current_function_returns_pcc_struct is not necessarily set. */
6923 if (current_function_returns_struct
6924 || current_function_returns_pcc_struct)
6926 rtx value_address
6927 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6928 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6929 #ifdef FUNCTION_OUTGOING_VALUE
6930 rtx outgoing
6931 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6932 current_function_decl);
6933 #else
6934 rtx outgoing
6935 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
6936 #endif
6938 /* Mark this as a function return value so integrate will delete the
6939 assignment and USE below when inlining this function. */
6940 REG_FUNCTION_VALUE_P (outgoing) = 1;
6942 #ifdef POINTERS_EXTEND_UNSIGNED
6943 /* The address may be ptr_mode and OUTGOING may be Pmode. */
6944 if (GET_MODE (outgoing) != GET_MODE (value_address))
6945 value_address = convert_memory_address (GET_MODE (outgoing),
6946 value_address);
6947 #endif
6949 emit_move_insn (outgoing, value_address);
6951 /* Show return register used to hold result (in this case the address
6952 of the result).  */
6953 current_function_return_rtx = outgoing;
6956 /* If this is an implementation of throw, do what's necessary to
6957 communicate between __builtin_eh_return and the epilogue. */
6958 expand_eh_return ();
6960 /* Emit the actual code to clobber return register. */
6962 rtx seq, after;
6964 start_sequence ();
6965 clobber_return_register ();
6966 seq = gen_sequence ();
6967 end_sequence ();
6969 after = emit_insn_after (seq, clobber_after);
6971 if (clobber_after != after)
6972 cfun->x_clobber_return_insn = after;
6975 /* ??? This should no longer be necessary since stupid is no longer with
6976 us, but there are some parts of the compiler (e.g. reload_combine and
6977 sh mach_dep_reorg) that still try to compute their own lifetime info
6978 instead of using the general framework. */
6979 use_return_register ();
6981 /* Fix up any gotos that jumped out to the outermost
6982 binding level of the function.
6983 Must follow emitting RETURN_LABEL. */
6985 /* If you have any cleanups to do at this point,
6986 and they need to create temporary variables,
6987 then you will lose. */
6988 expand_fixups (get_insns ());
6991 rtx
6992 get_arg_pointer_save_area (f)
6993 struct function *f;
6995 rtx ret = f->x_arg_pointer_save_area;
6997 if (! ret)
6999 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7000 f->x_arg_pointer_save_area = ret;
7003 if (f == cfun && ! f->arg_pointer_save_area_init)
7005 rtx seq;
7007 /* Save the arg pointer at the beginning of the function. The
7008 generated stack slot may not be a valid memory address, so we
7009 have to check it and fix it if necessary. */
7010 start_sequence ();
7011 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7012 seq = gen_sequence ();
7013 end_sequence ();
7015 push_topmost_sequence ();
7016 emit_insn_after (seq, get_insns ());
7017 pop_topmost_sequence ();
7020 return ret;
7023 /* Extend a vector that records the INSN_UIDs of INSNS (either a
7024 sequence or a single insn). */
7026 static void
7027 record_insns (insns, vecp)
7028 rtx insns;
7029 varray_type *vecp;
7031 if (GET_CODE (insns) == SEQUENCE)
7033 int len = XVECLEN (insns, 0);
7034 int i = VARRAY_SIZE (*vecp);
7036 VARRAY_GROW (*vecp, i + len);
7037 while (--len >= 0)
7039 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
7040 ++i;
7043 else
7045 int i = VARRAY_SIZE (*vecp);
7046 VARRAY_GROW (*vecp, i + 1);
7047 VARRAY_INT (*vecp, i) = INSN_UID (insns);
7051 /* Determine how many INSN_UIDs in VEC are part of INSN. */
7053 static int
7054 contains (insn, vec)
7055 rtx insn;
7056 varray_type vec;
7058 int i, j;
7060 if (GET_CODE (insn) == INSN
7061 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7063 int count = 0;
7064 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7065 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7066 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7067 count++;
7068 return count;
7070 else
7072 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7073 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7074 return 1;
7076 return 0;
7079 int
7080 prologue_epilogue_contains (insn)
7081 rtx insn;
7083 if (contains (insn, prologue))
7084 return 1;
7085 if (contains (insn, epilogue))
7086 return 1;
7087 return 0;
7090 int
7091 sibcall_epilogue_contains (insn)
7092 rtx insn;
7094 if (sibcall_epilogue)
7095 return contains (insn, sibcall_epilogue);
7096 return 0;
7099 #ifdef HAVE_return
7100 /* Insert gen_return at the end of block BB. This also means updating
7101 block_for_insn appropriately. */
7103 static void
7104 emit_return_into_block (bb, line_note)
7105 basic_block bb;
7106 rtx line_note;
7108 rtx p, end;
7110 p = NEXT_INSN (bb->end);
7111 end = emit_jump_insn_after (gen_return (), bb->end);
7112 if (line_note)
7113 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7114 NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7116 #endif /* HAVE_return */
7118 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7120 /* These functions convert the epilogue into a variant that does not modify the
7121 stack pointer. This is used in cases where a function returns an object
7122 whose size is not known until it is computed.  The called function leaves the
7123 object on the stack, leaves the stack depressed, and returns a pointer to
7124 the object.
7126 What we need to do is track all modifications and references to the stack
7127 pointer, deleting the modifications and changing the references to point to
7128 the location the stack pointer would have pointed to had the modifications
7129 taken place.
7131 These functions need to be portable so we need to make as few assumptions
7132 about the epilogue as we can. However, the epilogue basically contains
7133 three things: instructions to reset the stack pointer, instructions to
7134 reload registers, possibly including the frame pointer, and an
7135 instruction to return to the caller.
7137 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7138 We also make no attempt to validate the insns we make since if they are
7139 invalid, we probably can't do anything valid. The intent is that these
7140 routines get "smarter" as more and more machines start to use them and
7141 they try operating on different epilogues.
7143 We use the following structure to track what the part of the epilogue that
7144 we've already processed has done. We keep two copies of the SP equivalence,
7145 one for use during the insn we are processing and one for use in the next
7146 insn. The difference is because one part of a PARALLEL may adjust SP
7147 and the other may use it. */
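/* A sketch of the bookkeeping below, with schematic insns rather than
   any real machine's epilogue:

	sp = sp + 16	(not emitted; SP_OFFSET becomes 16)
	fp = mem (sp)	(emitted as fp = mem (sp + 16))
	return

   Later references to SP are rewritten in terms of SP_EQUIV_REG plus
   SP_OFFSET, so the returned object sitting at the old SP is never
   disturbed by a stack-pointer adjustment.  */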
7149 struct epi_info
7151 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7152 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7153 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7154 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7155 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7156 should be set to once we no longer need
7157 its value. */
7160 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7161 static void emit_equiv_load PARAMS ((struct epi_info *));
7163 /* Modify SEQ, a SEQUENCE that is part of the epilogue, so that it makes
7164 no modifications to the stack pointer.  Return the new sequence. */
7166 static rtx
7167 keep_stack_depressed (seq)
7168 rtx seq;
7170 int i, j;
7171 struct epi_info info;
7173 /* If the epilogue is just a single instruction, it must be OK as is. */
7175 if (GET_CODE (seq) != SEQUENCE)
7176 return seq;
7178 /* Otherwise, start a sequence, initialize the information we have, and
7179 process all the insns we were given. */
7180 start_sequence ();
7182 info.sp_equiv_reg = stack_pointer_rtx;
7183 info.sp_offset = 0;
7184 info.equiv_reg_src = 0;
7186 for (i = 0; i < XVECLEN (seq, 0); i++)
7188 rtx insn = XVECEXP (seq, 0, i);
7190 if (!INSN_P (insn))
7192 add_insn (insn);
7193 continue;
7196 /* If this insn references the register that SP is equivalent to and
7197 we have a pending load to that register, we must force out the load
7198 first and then indicate we no longer know what SP's equivalent is. */
7199 if (info.equiv_reg_src != 0
7200 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7202 emit_equiv_load (&info);
7203 info.sp_equiv_reg = 0;
7206 info.new_sp_equiv_reg = info.sp_equiv_reg;
7207 info.new_sp_offset = info.sp_offset;
7209 /* If this is a (RETURN) and the return address is on the stack,
7210 update the address and change to an indirect jump. */
7211 if (GET_CODE (PATTERN (insn)) == RETURN
7212 || (GET_CODE (PATTERN (insn)) == PARALLEL
7213 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7215 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7216 rtx base = 0;
7217 HOST_WIDE_INT offset = 0;
7218 rtx jump_insn, jump_set;
7220 /* If the return address is in a register, we can emit the insn
7221 unchanged. Otherwise, it must be a MEM and we see what the
7222 base register and offset are. In any case, we have to emit any
7223 pending load to the equivalent reg of SP, if any. */
7224 if (GET_CODE (retaddr) == REG)
7226 emit_equiv_load (&info);
7227 add_insn (insn);
7228 continue;
7230 else if (GET_CODE (retaddr) == MEM
7231 && GET_CODE (XEXP (retaddr, 0)) == REG)
7232 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7233 else if (GET_CODE (retaddr) == MEM
7234 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7235 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7236 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7238 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7239 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7241 else
7242 abort ();
7244 /* If the base of the location containing the return pointer
7245 is SP, we must update it with the replacement address. Otherwise,
7246 just build the necessary MEM. */
7247 retaddr = plus_constant (base, offset);
7248 if (base == stack_pointer_rtx)
7249 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7250 plus_constant (info.sp_equiv_reg,
7251 info.sp_offset));
7253 retaddr = gen_rtx_MEM (Pmode, retaddr);
7255 /* If there is a pending load to the equivalent register for SP
7256 and we reference that register, we must load our address into
7257 a scratch register and then do that load. */
7258 if (info.equiv_reg_src
7259 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7261 unsigned int regno;
7262 rtx reg;
7264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7265 if (HARD_REGNO_MODE_OK (regno, Pmode)
7266 && !fixed_regs[regno] && call_used_regs[regno]
7267 && !FUNCTION_VALUE_REGNO_P (regno))
7268 break;
7270 if (regno == FIRST_PSEUDO_REGISTER)
7271 abort ();
7273 reg = gen_rtx_REG (Pmode, regno);
7274 emit_move_insn (reg, retaddr);
7275 retaddr = reg;
7278 emit_equiv_load (&info);
7279 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7281 /* Show the SET in the above insn is a RETURN. */
7282 jump_set = single_set (jump_insn);
7283 if (jump_set == 0)
7284 abort ();
7285 else
7286 SET_IS_RETURN_P (jump_set) = 1;
7289 /* If SP is not mentioned in the pattern and its equivalent register, if
7290 any, is not modified, just emit it. Otherwise, if neither is set,
7291 replace the reference to SP and emit the insn. If none of those are
7292 true, handle each SET individually. */
7293 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7294 && (info.sp_equiv_reg == stack_pointer_rtx
7295 || !reg_set_p (info.sp_equiv_reg, insn)))
7296 add_insn (insn);
7297 else if (! reg_set_p (stack_pointer_rtx, insn)
7298 && (info.sp_equiv_reg == stack_pointer_rtx
7299 || !reg_set_p (info.sp_equiv_reg, insn)))
7301 if (! validate_replace_rtx (stack_pointer_rtx,
7302 plus_constant (info.sp_equiv_reg,
7303 info.sp_offset),
7304 insn))
7305 abort ();
7307 add_insn (insn);
7309 else if (GET_CODE (PATTERN (insn)) == SET)
7310 handle_epilogue_set (PATTERN (insn), &info);
7311 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7313 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7314 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7315 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7317 else
7318 add_insn (insn);
7320 info.sp_equiv_reg = info.new_sp_equiv_reg;
7321 info.sp_offset = info.new_sp_offset;
7324 seq = gen_sequence ();
7325 end_sequence ();
7326 return seq;
7329 /* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
7330 structure that contains information about what we've seen so far. We
7331 process this SET by either updating that data or by emitting one or
7332 more insns. */
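/* For example (illustrative RTL): (set (reg sp) (plus (reg fp) (const_int 16)))
   records FP as the new SP-equivalent register with offset 16; a SET of any
   other register merely has its SP references rewritten before the insn is
   emitted.  */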
static void
handle_epilogue_set (set, p)
     rtx set;
     struct epi_info *p;
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
	abort ();

      if (GET_CODE (SET_SRC (set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}
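      /* Worked example (illustrative numbers): if SP was equivalent to
	 (reg R) plus 8 and this SET is
	 (set (reg sp) (plus (reg sp) (const_int 4))), the new equivalence
	 becomes (reg R) plus 12.  */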
      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
	abort ();

      return;
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  */
  else if (p->sp_equiv_reg != 0 && reg_set_p (p->sp_equiv_reg, set))
    {
      if (!rtx_equal_p (p->sp_equiv_reg, SET_DEST (set))
	  || p->equiv_reg_src != 0)
	abort ();
      else
	p->equiv_reg_src
	  = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				  plus_constant (p->sp_equiv_reg,
						 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn with its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (p)
     struct epi_info *p;
{
  if (p->equiv_reg_src != 0)
    emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);

  p->equiv_reg_src = 0;
}
#endif

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */
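/* In outline (as the code below does it): the prologue sequence, if any, is
   inserted on the single edge out of ENTRY_BLOCK_PTR, and the epilogue on
   the edge that falls through to EXIT_BLOCK_PTR; sibcall epilogues and
   simple `return' insns are handled as special cases.  */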
void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      record_insns (seq, &prologue);
      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      seq = gen_sequence ();
      end_sequence ();

      /* Can't deal with multiple successors of the entry block
	 at the moment.  Function should always have at least one
	 entry point.  */
      if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
	abort ();

      insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
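  /* (Only fake edges reach EXIT for, e.g., a function whose every
     path ends in a call to a `noreturn' function.)  */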
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
	 then by definition we don't need a full epilogue.  Examine
	 the block that falls through to EXIT.  If it does not
	 contain any code, examine its predecessors and try to
	 emit (conditional) return instructions.  */

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = last->end;
      while (label && GET_CODE (label) != CODE_LABEL)
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (last->head == label && GET_CODE (label) == CODE_LABEL)
	{
	  rtx epilogue_line_note = NULL_RTX;

	  /* Locate the line number associated with the closing brace,
	     if we can find one.  */
	  for (seq = get_last_insn ();
	       seq && ! active_insn_p (seq);
	       seq = PREV_INSN (seq))
	    if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
	      {
		epilogue_line_note = seq;
		break;
	      }

	  for (e = last->pred; e; e = e_next)
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      e_next = e->pred_next;
	      if (bb == ENTRY_BLOCK_PTR)
		continue;

	      jump = bb->end;
	      if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
		continue;

	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
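	      /* Illustratively: the jump (set (pc) (label_ref L)) ending
		 BB is deleted and a bare (return) insn is emitted into the
		 block in its place.  */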
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb, epilogue_line_note);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  rtx ret, *loc;

		  ret = SET_SRC (PATTERN (jump));
		  if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
		    loc = &XEXP (ret, 1);
		  else
		    loc = &XEXP (ret, 2);
		  ret = gen_rtx_RETURN (VOIDmode);
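		  /* E.g. (set (pc) (if_then_else COND (label_ref L) (pc)))
		     becomes (set (pc) (if_then_else COND (return) (pc))),
		     provided validate_change accepts the new pattern.  */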
		  if (! validate_change (jump, loc, ret, 0))
		    continue;
		  if (JUMP_LABEL (jump))
		    LABEL_NUSES (JUMP_LABEL (jump))--;

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (bb->succ->succ_next == NULL)
		    continue;
		}
	      else
		continue;

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (last->end);
	  emit_return_into_block (last, epilogue_line_note);
	  epilogue_end = last->end;
	  last->succ->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
	 due to RETURN instructions, but those don't need epilogues.
	 There really shouldn't be a mixture -- either all should have
	 been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;

      start_sequence ();
      epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
	 it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
	seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      record_insns (seq, &epilogue);

      seq = gen_sequence ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
#endif
 epilogue_done:

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = bb->end;
      rtx i;
      rtx newinsn;

      if (GET_CODE (insn) != CALL_INSN
	  || ! SIBLING_CALL_P (insn))
	continue;

      start_sequence ();
      seq = gen_sibcall_epilogue ();
      end_sequence ();

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  */
      record_insns (GET_CODE (seq) == SEQUENCE
		    ? seq : newinsn, &sibcall_epilogue);
    }
#endif
#ifdef HAVE_prologue
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note after the prologue.  Which means (1) that if
	 there are line number notes before where we inserted the
	 prologue we should move them, and (2) we should generate a
	 note before the end of the first basic block, if there isn't
	 one already there.

	 ??? This behaviour is completely broken when dealing with
	 multiple entry functions.  We simply always place the note in
	 the first basic block and let alternate entry points be
	 missed.  */

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  */
	      if (prev == NULL)
		break;
	      reorder_insns (insn, insn, prologue_end);
	    }
	}

      /* Find the last line number note in the first block.  */
      for (insn = BASIC_BLOCK (0)->end;
	   insn != prologue_end && insn;
	   insn = PREV_INSN (insn))
	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	  break;

      /* If we didn't find one, make a copy of the first line number
	 we run across.  */
      if (! insn)
	{
	  for (insn = next_active_insn (prologue_end);
	       insn;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	      {
		emit_line_note_after (NOTE_SOURCE_FILE (insn),
				      NOTE_LINE_NUMBER (insn),
				      prologue_end);
		break;
	      }
	}
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */
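/* (After scheduling, prologue insns may no longer sit just before the
   NOTE_INSN_PROLOGUE_END note, and likewise for the epilogue, so each note
   is moved next to the last prologue insn or first epilogue insn it
   describes.)  */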
void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      rtx insn, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = f; len && insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if ((len -= contains (insn, prologue)) == 0)
	    {
	      rtx next;
	      /* Find the prologue-end note if we haven't already, and
		 move it to just after the last prologue insn.  */
	      if (note == 0)
		{
		  for (note = insn; (note = NEXT_INSN (note));)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		      break;
		}

	      next = NEXT_INSN (note);

	      /* Whether or not we can depend on BLOCK_HEAD,
		 attempt to keep it up-to-date.  */
	      if (BLOCK_HEAD (0) == note)
		BLOCK_HEAD (0) = next;

	      remove_insn (note);
	      /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	      if (GET_CODE (insn) == CODE_LABEL)
		insn = NEXT_INSN (insn);
	      add_insn_after (note, insn);
	    }
	}
    }
  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      rtx insn, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if ((len -= contains (insn, epilogue)) == 0)
	    {
	      /* Find the epilogue-begin note if we haven't already, and
		 move it to just before the first epilogue insn.  */
	      if (note == 0)
		{
		  for (note = insn; (note = PREV_INSN (note));)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		      break;
		}

	      /* Whether or not we can depend on BLOCK_HEAD,
		 attempt to keep it up-to-date.  */
	      if (n_basic_blocks
		  && BLOCK_HEAD (n_basic_blocks-1) == insn)
		BLOCK_HEAD (n_basic_blocks-1) = note;

	      remove_insn (note);
	      add_insn_before (note, insn);
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  struct var_refs_queue *q;
  struct temp_slot *t;
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
	 i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);
  ggc_mark_rtx (p->x_clobber_return_insn);

  for (t = p->x_temp_slots; t != 0; t = t->next)
    {
      ggc_mark (t);
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);
      ggc_mark_tree (t->type);
    }

  for (q = p->fixup_var_refs_queue; q != 0; q = q->next)
    {
      ggc_mark (q);
      ggc_mark_rtx (q->modified);
    }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);

  mark_hard_reg_initial_vals (p);
}
/* Mark the struct function pointed to by *ARG for GC, if it is not
   NULL.  This is used to mark the current function and the outer
   function chain.  */

static void
maybe_mark_struct_function (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  if (f == 0)
    return;

  ggc_mark_struct_function (f);
}
/* Mark a struct function * for GC.  This is called from ggc-common.c.  */

void
ggc_mark_struct_function (f)
     struct function *f;
{
  ggc_mark (f);
  ggc_mark_tree (f->decl);

  mark_function_status (f);
  mark_eh_status (f->eh);
  mark_stmt_status (f->stmt);
  mark_expr_status (f->expr);
  mark_emit_status (f->emit);
  mark_varasm_status (f->varasm);

  if (mark_machine_status)
    (*mark_machine_status) (f);
  if (mark_lang_status)
    (*mark_lang_status) (f);

  if (f->original_arg_vector)
    ggc_mark_rtvec ((rtvec) f->original_arg_vector);
  if (f->original_decl_initial)
    ggc_mark_tree (f->original_decl_initial);
  if (f->outer)
    ggc_mark_struct_function (f->outer);
}
/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&cfun, 1, sizeof cfun, maybe_mark_struct_function);
  ggc_add_root (&outer_function_chain, 1, sizeof outer_function_chain,
		maybe_mark_struct_function);

  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}