/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
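
/* An illustrative outline of how a front end drives this file (a
   sketch only; the exact argument lists may differ in this tree, and
   the surrounding front-end code is hypothetical):

     expand_function_start (fndecl, 0);
     ... expand RTL for each statement of the body, calling
         assign_stack_local and put_var_into_stack on demand ...
     expand_function_end (input_filename, lineno, 0);

   Nothing in this file is called directly by users of the compiler.  */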
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macros NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest integer not greater than it that is
   a multiple of the required alignment.  Avoid using division in case
   the value is negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
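
/* Worked examples (illustration only, not part of the original source).
   With ALIGN a power of two, both macros reduce to bit masking, which is
   well defined even for negative VALUEs on a two's-complement host:

     FLOOR_ROUND (13, 8)  == (13 & ~7)        ==   8
     CEIL_ROUND  (13, 8)  == ((13 + 7) & ~7)  ==  16
     FLOOR_ROUND (-13, 8) == (-13 & ~7)       == -16

   This is why the comments above insist on power-of-two alignments
   and avoid division.  */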
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling.  */
static int profile_label_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));

/* This variable holds a pointer to a function to register any
   data items in the target specific, per-function data structure
   that will need garbage collection.  */
void (*mark_machine_status) PARAMS ((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static varray_type prologue;
static varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
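
/* A minimal sketch of the nesting discipline just described, using
   functions defined later in this file (the scenario itself is
   invented for illustration):

     push_temp_slots ();                       -- level N becomes N+1
     t = assign_stack_temp (DImode, 8, 0);     -- slot lives at level N+1
     ... emit RTL that uses T ...
     pop_temp_slots ();                        -- slot is freed again

   A slot whose KEEP flag is set survives calls to free_temp_slots
   but is still released when its own level is popped.  */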
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
                                         int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
                                        enum machine_mode, enum machine_mode,
                                        int, unsigned int, int,
                                        struct hash_table *));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
                                             enum machine_mode,
                                             struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
                                    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
                                          int, int, rtx));
static void fixup_var_refs_insns_with_hash
  PARAMS ((struct hash_table *, rtx,
           enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
                                         int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
                                      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
                                             int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
                                          struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
                               tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
                                       struct hash_table *));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
                                                         struct hash_table *,
                                                         hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_function_status PARAMS ((struct function *));
static void maybe_mark_struct_function PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
/* Pointer to chain of `struct function' for containing functions.  */
static struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  (*lang_hooks.function.enter_nested) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  (*lang_hooks.function.leave_nested) (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
                    p->fixup_var_refs_queue->promoted_mode,
                    p->fixup_var_refs_queue->unsignedp,
                    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        fixup_var_refs (queue->modified, queue->promoted_mode,
                        queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  (*lang_hooks.function.free) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = (*lang_hooks.types.type_for_mode) (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
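
/* Illustrative usage (an invented example, not from the original
   source): a word-sized slot with mode-derived alignment would be

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Passing ALIGN == -1 instead requests BIGGEST_ALIGNMENT and rounds
   SIZE up to a multiple of it, as documented above.  */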
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_VOLATILE_P (p->slot) = 0;
  set_mem_alias_set (p->slot, 0);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
  set_mem_align (p->slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
      MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
    }

  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
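
/* Illustrative usage (invented): a DImode scratch that dies at the
   next call to free_temp_slots would be obtained with

     rtx t = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   while KEEP == 1 retains it until its nesting level is popped.  */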
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error_with_decl (decl, "size of variable `%s' is too large");
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are in effect.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
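
/* Worked example with invented offsets: a free BLKmode slot P with
   base_offset 0 and full_size 16 is adjacent to a free BLKmode slot Q
   with base_offset 16 and full_size 16, since 0 + 16 == 16; the loop
   above merges Q into P, leaving a single free slot whose full_size
   is 32.  */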
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and they have an operand in common, try a recursive call on
     the remaining operands.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
               || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
         ? SAVE_EXPR_RTL (decl)
         : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
                            decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fixup references to the parts only after we fixup references
         to the whole CONCAT, lest we do double fixups for the latter
         references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
         already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;
}
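
/* An invented source-level example of what triggers the call above:

     int i;        -- i is initially given a pseudo register
     int *p = &i;  -- the address-taking forces put_var_into_stack

   Afterwards the decl's DECL_RTL is a MEM, and every reference already
   emitted for the pseudo is rewritten by the fixup_var_refs machinery
   below.  */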
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     struct hash_table *ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
     rtx may_share;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
                                      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0, may_share);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                may_share);
          end_sequence ();
        }
    }
}
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx may_share;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the Normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                        may_share);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }
      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
                             may_share);

      insn = next;
    }
}
/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
     struct hash_table *ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
{
  struct insns_for_mem_entry *ime
    = (struct insns_for_mem_entry *) hash_lookup (ht, var,
                                                  /*create=*/0, /*copy=*/0);
  rtx insn_list;

  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
                           unsignedp, 1, may_share);
}
1728 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1729 the insn under examination, VAR is the variable to fix up
1730 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1731 TOPLEVEL is nonzero if this is the main insn chain for this
1732 function. */
1734 static void
1735 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
1736 rtx insn;
1737 rtx var;
1738 enum machine_mode promoted_mode;
1739 int unsignedp;
1740 int toplevel;
1741 rtx no_share;
1743 rtx call_dest = 0;
1744 rtx set, prev, prev_set;
1745 rtx note;
1747 /* Remember the notes in case we delete the insn. */
1748 note = REG_NOTES (insn);
1750 /* If this is a CLOBBER of VAR, delete it.
1752 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1753 and REG_RETVAL notes too. */
1754 if (GET_CODE (PATTERN (insn)) == CLOBBER
1755 && (XEXP (PATTERN (insn), 0) == var
1756 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1757 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1758 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1760 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1761 /* The REG_LIBCALL note will go away since we are going to
1762 turn INSN into a NOTE, so just delete the
1763 corresponding REG_RETVAL note. */
1764 remove_note (XEXP (note, 0),
1765 find_reg_note (XEXP (note, 0), REG_RETVAL,
1766 NULL_RTX));
1768 delete_insn (insn);
1771 /* The insn to load VAR from a home in the arglist
1772 is now a no-op. When we see it, just delete it.
1773 Similarly if this is storing VAR from a register from which
1774 it was loaded in the previous insn. This will occur
1775 when an ADDRESSOF was made for an arglist slot. */
1776 else if (toplevel
1777 && (set = single_set (insn)) != 0
1778 && SET_DEST (set) == var
1779 /* If this represents the result of an insn group,
1780 don't delete the insn. */
1781 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1782 && (rtx_equal_p (SET_SRC (set), var)
1783 || (GET_CODE (SET_SRC (set)) == REG
1784 && (prev = prev_nonnote_insn (insn)) != 0
1785 && (prev_set = single_set (prev)) != 0
1786 && SET_DEST (prev_set) == SET_SRC (set)
1787 && rtx_equal_p (SET_SRC (prev_set), var))))
1789 delete_insn (insn);
1791 else
1793 struct fixup_replacement *replacements = 0;
1794 rtx next_insn = NEXT_INSN (insn);
1796 if (SMALL_REGISTER_CLASSES)
1798 /* If the insn that copies the results of a CALL_INSN
1799 into a pseudo now references VAR, we have to use an
1800 intermediate pseudo since we want the life of the
1801 return value register to be only a single insn.
1803 If we don't use an intermediate pseudo, such things as
1804 address computations (to make the address of VAR valid
1805 if it is not already) could be placed between the CALL_INSN and INSN.
1807 To make sure this doesn't happen, we record the destination
1808 of the CALL_INSN and see if the next insn uses both that
1809 and VAR. */
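/* Sketch of the transformation (hypothetical registers): given

       (call_insn (set (reg:SI r0) (call ...)))
       (insn (set VAR' (reg:SI r0)))      ; VAR' now needs address fixups

   we emit a copy through a fresh pseudo and rewrite the second insn:

       (insn (set (reg:SI 105) (reg:SI r0)))
       (insn (set VAR' (reg:SI 105)))

   so any address-computation insns land after the copy from the
   return-value register, never between it and the call. */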
1811 if (call_dest != 0 && GET_CODE (insn) == INSN
1812 && reg_mentioned_p (var, PATTERN (insn))
1813 && reg_mentioned_p (call_dest, PATTERN (insn)))
1815 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1817 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1819 PATTERN (insn) = replace_rtx (PATTERN (insn),
1820 call_dest, temp);
1823 if (GET_CODE (insn) == CALL_INSN
1824 && GET_CODE (PATTERN (insn)) == SET)
1825 call_dest = SET_DEST (PATTERN (insn));
1826 else if (GET_CODE (insn) == CALL_INSN
1827 && GET_CODE (PATTERN (insn)) == PARALLEL
1828 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1829 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1830 else
1831 call_dest = 0;
1834 /* See if we have to do anything to INSN now that VAR is in
1835 memory. If it needs to be loaded into a pseudo, use a single
1836 pseudo for the entire insn in case there is a MATCH_DUP
1837 between two operands. We pass a pointer to the head of
1838 a list of struct fixup_replacements. If fixup_var_refs_1
1839 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1840 it will record them in this list.
1842 If it allocated a pseudo for any replacement, we copy into
1843 it here. */
1845 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1846 &replacements, no_share);
1848 /* If this is last_parm_insn, and any instructions were output
1849 after it to fix it up, then we must set last_parm_insn to
1850 the last such instruction emitted. */
1851 if (insn == last_parm_insn)
1852 last_parm_insn = PREV_INSN (next_insn);
1854 while (replacements)
1856 struct fixup_replacement *next;
1858 if (GET_CODE (replacements->new) == REG)
1860 rtx insert_before;
1861 rtx seq;
1863 /* OLD might be a (subreg (mem)). */
1864 if (GET_CODE (replacements->old) == SUBREG)
1865 replacements->old
1866 = fixup_memory_subreg (replacements->old, insn,
1867 promoted_mode, 0);
1868 else
1869 replacements->old
1870 = fixup_stack_1 (replacements->old, insn);
1872 insert_before = insn;
1874 /* If we are changing the mode, do a conversion.
1875 This might be wasteful, but combine.c will
1876 eliminate much of the waste. */
1878 if (GET_MODE (replacements->new)
1879 != GET_MODE (replacements->old))
1881 start_sequence ();
1882 convert_move (replacements->new,
1883 replacements->old, unsignedp);
1884 seq = gen_sequence ();
1885 end_sequence ();
1887 else
1888 seq = gen_move_insn (replacements->new,
1889 replacements->old);
1891 emit_insn_before (seq, insert_before);
1894 next = replacements->next;
1895 free (replacements);
1896 replacements = next;
1900 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1901 But don't touch other insns referred to by reg-notes;
1902 we will get them elsewhere. */
1903 while (note)
1905 if (GET_CODE (note) != INSN_LIST)
1906 XEXP (note, 0)
1907 = walk_fixup_memory_subreg (XEXP (note, 0), insn,
1908 promoted_mode, 1);
1909 note = XEXP (note, 1);
1913 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1914 See if the rtx expression at *LOC in INSN needs to be changed.
1916 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1917 contain a list of original rtx's and replacements. If we find that we need
1918 to modify this insn by replacing a memory reference with a pseudo or by
1919 making a new MEM to implement a SUBREG, we consult that list to see if
1920 we have already chosen a replacement. If none has already been allocated,
1921 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1922 or the SUBREG, as appropriate, to the pseudo. */
1924 static void
1925 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1926 rtx var;
1927 enum machine_mode promoted_mode;
1928 rtx *loc;
1929 rtx insn;
1930 struct fixup_replacement **replacements;
1931 rtx no_share;
1933 int i;
1934 rtx x = *loc;
1935 RTX_CODE code = GET_CODE (x);
1936 const char *fmt;
1937 rtx tem, tem1;
1938 struct fixup_replacement *replacement;
1940 switch (code)
1942 case ADDRESSOF:
1943 if (XEXP (x, 0) == var)
1945 /* Prevent sharing of rtl that might lose. */
1946 rtx sub = copy_rtx (XEXP (var, 0));
1948 if (! validate_change (insn, loc, sub, 0))
1950 rtx y = gen_reg_rtx (GET_MODE (sub));
1951 rtx seq, new_insn;
1953 /* We should be able to replace with a register or all is lost.
1954 Note that we can't use validate_change to verify this, since
1955 it would not take care of replacing all dups simultaneously. */
1956 if (! validate_replace_rtx (*loc, y, insn))
1957 abort ();
1959 /* Careful! First try to recognize a direct move of the
1960 value, mimicking how things are done in gen_reload wrt
1961 PLUS. Consider what happens when insn is a conditional
1962 move instruction and addsi3 clobbers flags. */
1964 start_sequence ();
1965 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1966 seq = gen_sequence ();
1967 end_sequence ();
1969 if (recog_memoized (new_insn) < 0)
1971 /* That failed. Fall back on force_operand and hope. */
1973 start_sequence ();
1974 sub = force_operand (sub, y);
1975 if (sub != y)
1976 emit_insn (gen_move_insn (y, sub));
1977 seq = gen_sequence ();
1978 end_sequence ();
1981 #ifdef HAVE_cc0
1982 /* Don't separate setter from user. */
1983 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1984 insn = PREV_INSN (insn);
1985 #endif
1987 emit_insn_before (seq, insn);
1990 return;
1992 case MEM:
1993 if (var == x)
1995 /* If we already have a replacement, use it. Otherwise,
1996 try to fix up this address in case it is invalid. */
1998 replacement = find_fixup_replacement (replacements, var);
1999 if (replacement->new)
2001 *loc = replacement->new;
2002 return;
2005 *loc = replacement->new = x = fixup_stack_1 (x, insn);
2007 /* Unless we are forcing memory to register or we changed the mode,
2008 we can leave things the way they are if the insn is valid. */
2010 INSN_CODE (insn) = -1;
2011 if (! flag_force_mem && GET_MODE (x) == promoted_mode
2012 && recog_memoized (insn) >= 0)
2013 return;
2015 *loc = replacement->new = gen_reg_rtx (promoted_mode);
2016 return;
2019 /* If X contains VAR, we need to unshare it here so that we update
2020 each occurrence separately. But all identical MEMs in one insn
2021 must be replaced with the same rtx because of the possibility of
2022 MATCH_DUPs. */
2024 if (reg_mentioned_p (var, x))
2026 replacement = find_fixup_replacement (replacements, x);
2027 if (replacement->new == 0)
2028 replacement->new = copy_most_rtx (x, no_share);
2030 *loc = x = replacement->new;
2031 code = GET_CODE (x);
2033 break;
2035 case REG:
2036 case CC0:
2037 case PC:
2038 case CONST_INT:
2039 case CONST:
2040 case SYMBOL_REF:
2041 case LABEL_REF:
2042 case CONST_DOUBLE:
2043 case CONST_VECTOR:
2044 return;
2046 case SIGN_EXTRACT:
2047 case ZERO_EXTRACT:
2048 /* Note that in some cases those types of expressions are altered
2049 by optimize_bit_field, and do not survive to get here. */
2050 if (XEXP (x, 0) == var
2051 || (GET_CODE (XEXP (x, 0)) == SUBREG
2052 && SUBREG_REG (XEXP (x, 0)) == var))
2054 /* Get TEM as a valid MEM in the mode presently in the insn.
2056 We don't worry about the possibility of MATCH_DUP here; it
2057 is highly unlikely and would be tricky to handle. */
2059 tem = XEXP (x, 0);
2060 if (GET_CODE (tem) == SUBREG)
2062 if (GET_MODE_BITSIZE (GET_MODE (tem))
2063 > GET_MODE_BITSIZE (GET_MODE (var)))
2065 replacement = find_fixup_replacement (replacements, var);
2066 if (replacement->new == 0)
2067 replacement->new = gen_reg_rtx (GET_MODE (var));
2068 SUBREG_REG (tem) = replacement->new;
2070 /* The following code works only if we have a MEM, so we
2071 need to handle the subreg here. We directly substitute
2072 it assuming that a subreg must be OK here. We already
2073 scheduled a replacement to copy the mem into the
2074 subreg. */
2075 XEXP (x, 0) = tem;
2076 return;
2078 else
2079 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2081 else
2082 tem = fixup_stack_1 (tem, insn);
2084 /* Unless we want to load from memory, get TEM into the proper mode
2085 for an extract from memory. This can only be done if the
2086 extract is at a constant position and length. */
2088 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2089 && GET_CODE (XEXP (x, 2)) == CONST_INT
2090 && ! mode_dependent_address_p (XEXP (tem, 0))
2091 && ! MEM_VOLATILE_P (tem))
2093 enum machine_mode wanted_mode = VOIDmode;
2094 enum machine_mode is_mode = GET_MODE (tem);
2095 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2097 if (GET_CODE (x) == ZERO_EXTRACT)
2099 enum machine_mode new_mode
2100 = mode_for_extraction (EP_extzv, 1);
2101 if (new_mode != MAX_MACHINE_MODE)
2102 wanted_mode = new_mode;
2104 else if (GET_CODE (x) == SIGN_EXTRACT)
2106 enum machine_mode new_mode
2107 = mode_for_extraction (EP_extv, 1);
2108 if (new_mode != MAX_MACHINE_MODE)
2109 wanted_mode = new_mode;
2112 /* If we have a narrower mode, we can do something. */
2113 if (wanted_mode != VOIDmode
2114 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2116 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2117 rtx old_pos = XEXP (x, 2);
2118 rtx newmem;
2120 /* If the bytes and bits are counted differently, we
2121 must adjust the offset. */
2122 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2123 offset = (GET_MODE_SIZE (is_mode)
2124 - GET_MODE_SIZE (wanted_mode) - offset);
2126 pos %= GET_MODE_BITSIZE (wanted_mode);
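/* A worked example (hypothetical values): extracting at bit position
   POS = 25 from an SImode MEM (is_mode = SImode, 4 bytes) when the
   extraction insn wants QImode (1 byte).  OFFSET starts as 25 / 8 = 3;
   on a target where BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN it becomes
   4 - 1 - 3 = 0.  POS then reduces to 25 % 8 = 1, i.e. bit 1 within
   the byte now addressed. */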
2128 newmem = adjust_address_nv (tem, wanted_mode, offset);
2130 /* Make the change and see if the insn remains valid. */
2131 INSN_CODE (insn) = -1;
2132 XEXP (x, 0) = newmem;
2133 XEXP (x, 2) = GEN_INT (pos);
2135 if (recog_memoized (insn) >= 0)
2136 return;
2138 /* Otherwise, restore old position. XEXP (x, 0) will be
2139 restored later. */
2140 XEXP (x, 2) = old_pos;
2144 /* If we get here, the bitfield extract insn can't accept a memory
2145 reference. Copy the input into a register. */
2147 tem1 = gen_reg_rtx (GET_MODE (tem));
2148 emit_insn_before (gen_move_insn (tem1, tem), insn);
2149 XEXP (x, 0) = tem1;
2150 return;
2152 break;
2154 case SUBREG:
2155 if (SUBREG_REG (x) == var)
2157 /* If this is a special SUBREG made because VAR was promoted
2158 from a wider mode, replace it with VAR and call ourself
2159 recursively, this time saying that the object previously
2160 had its current mode (by virtue of the SUBREG). */
2162 if (SUBREG_PROMOTED_VAR_P (x))
2164 *loc = var;
2165 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2166 no_share);
2167 return;
2170 /* If this SUBREG makes VAR wider, it has become a paradoxical
2171 SUBREG with VAR in memory, but these aren't allowed at this
2172 stage of the compilation. So load VAR into a pseudo and take
2173 a SUBREG of that pseudo. */
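/* Sketch (hypothetical modes and pseudos): if VAR was SImode, an
   occurrence like (subreg:DI VAR' 0) would become a paradoxical
   (subreg:DI (mem:SI ...) 0); we substitute (subreg:DI (reg:SI 105) 0),
   where (reg:SI 105) is a fresh pseudo that fixup_var_refs_insn will
   load from VAR's stack slot. */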
2174 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2176 replacement = find_fixup_replacement (replacements, var);
2177 if (replacement->new == 0)
2178 replacement->new = gen_reg_rtx (promoted_mode);
2179 SUBREG_REG (x) = replacement->new;
2180 return;
2183 /* See if we have already found a replacement for this SUBREG.
2184 If so, use it. Otherwise, make a MEM and see if the insn
2185 is recognized. If not, or if we should force MEM into a register,
2186 make a pseudo for this SUBREG. */
2187 replacement = find_fixup_replacement (replacements, x);
2188 if (replacement->new)
2190 *loc = replacement->new;
2191 return;
2194 replacement->new = *loc = fixup_memory_subreg (x, insn,
2195 promoted_mode, 0);
2197 INSN_CODE (insn) = -1;
2198 if (! flag_force_mem && recog_memoized (insn) >= 0)
2199 return;
2201 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2202 return;
2204 break;
2206 case SET:
2207 /* First do special simplification of bit-field references. */
2208 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2209 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2210 optimize_bit_field (x, insn, 0);
2211 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2212 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2213 optimize_bit_field (x, insn, 0);
2215 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2216 into a register and then store it back out. */
2217 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2218 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2219 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2220 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2221 > GET_MODE_SIZE (GET_MODE (var))))
2223 replacement = find_fixup_replacement (replacements, var);
2224 if (replacement->new == 0)
2225 replacement->new = gen_reg_rtx (GET_MODE (var));
2227 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2228 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2231 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2232 insn into a pseudo and store the low part of the pseudo into VAR. */
2233 if (GET_CODE (SET_DEST (x)) == SUBREG
2234 && SUBREG_REG (SET_DEST (x)) == var
2235 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2236 > GET_MODE_SIZE (GET_MODE (var))))
2238 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2239 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2240 tem)),
2241 insn);
2242 break;
2246 rtx dest = SET_DEST (x);
2247 rtx src = SET_SRC (x);
2248 rtx outerdest = dest;
2250 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2251 || GET_CODE (dest) == SIGN_EXTRACT
2252 || GET_CODE (dest) == ZERO_EXTRACT)
2253 dest = XEXP (dest, 0);
2255 if (GET_CODE (src) == SUBREG)
2256 src = SUBREG_REG (src);
2258 /* If VAR does not appear at the top level of the SET
2259 just scan the lower levels of the tree. */
2261 if (src != var && dest != var)
2262 break;
2264 /* We will need to rerecognize this insn. */
2265 INSN_CODE (insn) = -1;
2267 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2268 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2270 /* Since this case will return, ensure we fixup all the
2271 operands here. */
2272 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2273 insn, replacements, no_share);
2274 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2275 insn, replacements, no_share);
2276 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2277 insn, replacements, no_share);
2279 tem = XEXP (outerdest, 0);
2281 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2282 that may appear inside a ZERO_EXTRACT.
2283 This was legitimate when the MEM was a REG. */
2284 if (GET_CODE (tem) == SUBREG
2285 && SUBREG_REG (tem) == var)
2286 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2287 else
2288 tem = fixup_stack_1 (tem, insn);
2290 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2291 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2292 && ! mode_dependent_address_p (XEXP (tem, 0))
2293 && ! MEM_VOLATILE_P (tem))
2295 enum machine_mode wanted_mode;
2296 enum machine_mode is_mode = GET_MODE (tem);
2297 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2299 wanted_mode = mode_for_extraction (EP_insv, 0);
2301 /* If we have a narrower mode, we can do something. */
2302 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2304 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2305 rtx old_pos = XEXP (outerdest, 2);
2306 rtx newmem;
2308 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2309 offset = (GET_MODE_SIZE (is_mode)
2310 - GET_MODE_SIZE (wanted_mode) - offset);
2312 pos %= GET_MODE_BITSIZE (wanted_mode);
2314 newmem = adjust_address_nv (tem, wanted_mode, offset);
2316 /* Make the change and see if the insn remains valid. */
2317 INSN_CODE (insn) = -1;
2318 XEXP (outerdest, 0) = newmem;
2319 XEXP (outerdest, 2) = GEN_INT (pos);
2321 if (recog_memoized (insn) >= 0)
2322 return;
2324 /* Otherwise, restore old position. XEXP (x, 0) will be
2325 restored later. */
2326 XEXP (outerdest, 2) = old_pos;
2330 /* If we get here, the bit-field store doesn't allow memory
2331 or isn't located at a constant position. Load the value into
2332 a register, do the store, and put it back into memory. */
2334 tem1 = gen_reg_rtx (GET_MODE (tem));
2335 emit_insn_before (gen_move_insn (tem1, tem), insn);
2336 emit_insn_after (gen_move_insn (tem, tem1), insn);
2337 XEXP (outerdest, 0) = tem1;
2338 return;
2341 /* STRICT_LOW_PART is a no-op on memory references
2342 and it can cause combinations to be unrecognizable,
2343 so eliminate it. */
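/* E.g. (a sketch): (set (strict_low_part (subreg:QI VAR' 0)) ...)
   becomes (set (subreg:QI VAR' 0) ...), and the remaining SUBREG of
   the MEM is cleaned up by the code below; a store to memory already
   affects only the bytes addressed. */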
2345 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2346 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2348 /* A valid insn to copy VAR into or out of a register
2349 must be left alone, to avoid an infinite loop here.
2350 If the reference to VAR is by a subreg, fix that up,
2351 since SUBREG is not valid for a memref.
2352 Also fix up the address of the stack slot.
2354 Note that we must not try to recognize the insn until
2355 after we know that we have valid addresses and no
2356 (subreg (mem ...) ...) constructs, since these interfere
2357 with determining the validity of the insn. */
2359 if ((SET_SRC (x) == var
2360 || (GET_CODE (SET_SRC (x)) == SUBREG
2361 && SUBREG_REG (SET_SRC (x)) == var))
2362 && (GET_CODE (SET_DEST (x)) == REG
2363 || (GET_CODE (SET_DEST (x)) == SUBREG
2364 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2365 && GET_MODE (var) == promoted_mode
2366 && x == single_set (insn))
2368 rtx pat, last;
2370 if (GET_CODE (SET_SRC (x)) == SUBREG
2371 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2372 > GET_MODE_SIZE (GET_MODE (var))))
2374 /* This (subreg VAR) is now a paradoxical subreg. We need
2375 to replace VAR instead of the subreg. */
2376 replacement = find_fixup_replacement (replacements, var);
2377 if (replacement->new == NULL_RTX)
2378 replacement->new = gen_reg_rtx (GET_MODE (var));
2379 SUBREG_REG (SET_SRC (x)) = replacement->new;
2381 else
2383 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2384 if (replacement->new)
2385 SET_SRC (x) = replacement->new;
2386 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2387 SET_SRC (x) = replacement->new
2388 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2389 0);
2390 else
2391 SET_SRC (x) = replacement->new
2392 = fixup_stack_1 (SET_SRC (x), insn);
2395 if (recog_memoized (insn) >= 0)
2396 return;
2398 /* INSN is not valid, but we know that we want to
2399 copy SET_SRC (x) to SET_DEST (x) in some way. So
2400 we generate the move and see whether it requires more
2401 than one insn. If it does, we emit those insns and
2402 delete INSN. Otherwise, we can just replace the pattern
2403 of INSN; we have already verified above that INSN has
2404 no other function than to do X. */
2406 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2407 if (GET_CODE (pat) == SEQUENCE)
2409 last = emit_insn_before (pat, insn);
2411 /* INSN might have REG_RETVAL or other important notes, so
2412 we need to store the pattern of the last insn in the
2413 sequence into INSN similarly to the normal case. LAST
2414 should not have REG_NOTES, but we allow them if INSN has
2415 no REG_NOTES. */
2416 if (REG_NOTES (last) && REG_NOTES (insn))
2417 abort ();
2418 if (REG_NOTES (last))
2419 REG_NOTES (insn) = REG_NOTES (last);
2420 PATTERN (insn) = PATTERN (last);
2422 delete_insn (last);
2424 else
2425 PATTERN (insn) = pat;
2427 return;
2430 if ((SET_DEST (x) == var
2431 || (GET_CODE (SET_DEST (x)) == SUBREG
2432 && SUBREG_REG (SET_DEST (x)) == var))
2433 && (GET_CODE (SET_SRC (x)) == REG
2434 || (GET_CODE (SET_SRC (x)) == SUBREG
2435 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2436 && GET_MODE (var) == promoted_mode
2437 && x == single_set (insn))
2439 rtx pat, last;
2441 if (GET_CODE (SET_DEST (x)) == SUBREG)
2442 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2443 promoted_mode, 0);
2444 else
2445 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2447 if (recog_memoized (insn) >= 0)
2448 return;
2450 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2451 if (GET_CODE (pat) == SEQUENCE)
2453 last = emit_insn_before (pat, insn);
2455 /* INSN might have REG_RETVAL or other important notes, so
2456 we need to store the pattern of the last insn in the
2457 sequence into INSN similarly to the normal case. LAST
2458 should not have REG_NOTES, but we allow them if INSN has
2459 no REG_NOTES. */
2460 if (REG_NOTES (last) && REG_NOTES (insn))
2461 abort ();
2462 if (REG_NOTES (last))
2463 REG_NOTES (insn) = REG_NOTES (last);
2464 PATTERN (insn) = PATTERN (last);
2466 delete_insn (last);
2468 else
2469 PATTERN (insn) = pat;
2471 return;
2474 /* Otherwise, storing into VAR must be handled specially
2475 by storing into a temporary and copying that into VAR
2476 with a new insn after this one. Note that this case
2477 will be used when storing into a promoted scalar since
2478 the insn will now have different modes on the input
2479 and output and hence will be invalid (except for the case
2480 of setting it to a constant, which does not need any
2481 change if it is valid). We generate extra code in that case,
2482 but combine.c will eliminate it. */
2484 if (dest == var)
2486 rtx temp;
2487 rtx fixeddest = SET_DEST (x);
2488 enum machine_mode temp_mode;
2490 /* STRICT_LOW_PART can be discarded, around a MEM. */
2491 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2492 fixeddest = XEXP (fixeddest, 0);
2493 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2494 if (GET_CODE (fixeddest) == SUBREG)
2496 fixeddest = fixup_memory_subreg (fixeddest, insn,
2497 promoted_mode, 0);
2498 temp_mode = GET_MODE (fixeddest);
2500 else
2502 fixeddest = fixup_stack_1 (fixeddest, insn);
2503 temp_mode = promoted_mode;
2506 temp = gen_reg_rtx (temp_mode);
2508 emit_insn_after (gen_move_insn (fixeddest,
2509 gen_lowpart (GET_MODE (fixeddest),
2510 temp)),
2511 insn);
2513 SET_DEST (x) = temp;
2517 default:
2518 break;
2521 /* Nothing special about this RTX; fix its operands. */
2523 fmt = GET_RTX_FORMAT (code);
2524 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2526 if (fmt[i] == 'e')
2527 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2528 no_share);
2529 else if (fmt[i] == 'E')
2531 int j;
2532 for (j = 0; j < XVECLEN (x, i); j++)
2533 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2534 insn, replacements, no_share);
2539 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2540 The REG was placed on the stack, so X now has the form (SUBREG:m1
2541 (MEM:m2 ...)).
2543 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2544 must be emitted to compute NEWADDR, put them before INSN.
2546 UNCRITICAL nonzero means accept paradoxical subregs.
2547 This is used for subregs found inside REG_NOTES. */
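/* A sketch of the transformation, assuming 4-byte SImode and 1-byte
   QImode on a little-endian target:

       before:  (subreg:QI (mem:SI (reg:SI fp)) 3)
       after:   (mem:QI (plus:SI (reg:SI fp) (const_int 3)))

   with the sum computed into a temporary first if it is not a valid
   address. */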
2549 static rtx
2550 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2551 rtx x;
2552 rtx insn;
2553 enum machine_mode promoted_mode;
2554 int uncritical;
2556 int offset;
2557 rtx mem = SUBREG_REG (x);
2558 rtx addr = XEXP (mem, 0);
2559 enum machine_mode mode = GET_MODE (x);
2560 rtx result;
2562 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2563 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2564 abort ();
2566 offset = SUBREG_BYTE (x);
2567 if (BYTES_BIG_ENDIAN)
2568 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2569 the offset so that it points to the right location within the
2570 MEM. */
2571 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2573 if (!flag_force_addr
2574 && memory_address_p (mode, plus_constant (addr, offset)))
2575 /* Shortcut if no insns need be emitted. */
2576 return adjust_address (mem, mode, offset);
2578 start_sequence ();
2579 result = adjust_address (mem, mode, offset);
2580 emit_insn_before (gen_sequence (), insn);
2581 end_sequence ();
2582 return result;
2585 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2586 Replace subexpressions of X in place.
2587 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2588 Otherwise return X, with its contents possibly altered.
2590 INSN, PROMOTED_MODE and UNCRITICAL are as for
2591 fixup_memory_subreg. */
2593 static rtx
2594 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2595 rtx x;
2596 rtx insn;
2597 enum machine_mode promoted_mode;
2598 int uncritical;
2600 enum rtx_code code;
2601 const char *fmt;
2602 int i;
2604 if (x == 0)
2605 return 0;
2607 code = GET_CODE (x);
2609 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2610 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2612 /* Nothing special about this RTX; fix its operands. */
2614 fmt = GET_RTX_FORMAT (code);
2615 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2617 if (fmt[i] == 'e')
2618 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2619 promoted_mode, uncritical);
2620 else if (fmt[i] == 'E')
2622 int j;
2623 for (j = 0; j < XVECLEN (x, i); j++)
2624 XVECEXP (x, i, j)
2625 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2626 promoted_mode, uncritical);
2629 return x;
2632 /* For each memory ref within X, if it refers to a stack slot
2633 with an out of range displacement, put the address in a temp register
2634 (emitting new insns before INSN to load these registers)
2635 and alter the memory ref to use that register.
2636 Replace each such MEM rtx with a copy, to avoid clobberage. */
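/* For instance (hypothetical displacement): if a target accepts only
   displacements up to 4095, then

       (mem:SI (plus:SI (reg:SI fp) (const_int 40000)))

   is rewritten by loading the PLUS into a fresh pseudo and using

       (mem:SI (reg:SI 105))

   instead. */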
2638 static rtx
2639 fixup_stack_1 (x, insn)
2640 rtx x;
2641 rtx insn;
2643 int i;
2644 RTX_CODE code = GET_CODE (x);
2645 const char *fmt;
2647 if (code == MEM)
2649 rtx ad = XEXP (x, 0);
2650 /* If we have address of a stack slot but it's not valid
2651 (displacement is too large), compute the sum in a register. */
2652 if (GET_CODE (ad) == PLUS
2653 && GET_CODE (XEXP (ad, 0)) == REG
2654 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2655 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2656 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2657 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2658 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2659 #endif
2660 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2661 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2662 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2663 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2665 rtx temp, seq;
2666 if (memory_address_p (GET_MODE (x), ad))
2667 return x;
2669 start_sequence ();
2670 temp = copy_to_reg (ad);
2671 seq = gen_sequence ();
2672 end_sequence ();
2673 emit_insn_before (seq, insn);
2674 return replace_equiv_address (x, temp);
2676 return x;
2679 fmt = GET_RTX_FORMAT (code);
2680 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2682 if (fmt[i] == 'e')
2683 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2684 else if (fmt[i] == 'E')
2686 int j;
2687 for (j = 0; j < XVECLEN (x, i); j++)
2688 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2691 return x;
2694 /* Optimization: a bit-field instruction whose field
2695 happens to be a byte or halfword in memory
2696 can be changed to a move instruction.
2698 We call here when INSN is an insn to examine or store into a bit-field.
2699 BODY is the SET-rtx to be altered.
2701 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2702 (Currently this is called only from function.c, and EQUIV_MEM
2703 is always 0.) */
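/* Sketch of the optimization (assuming 8-bit bytes, a suitably aligned
   field, and a target where bits and bytes are numbered the same way):
   an extraction such as

       (set (reg:SI 103) (zero_extract:SI (mem:SI A) (const_int 8)
                                          (const_int 8)))

   denotes a whole byte at a byte boundary, so it can be rewritten as a
   plain move from (mem:QI (plus A 1)), followed by any conversion
   needed to reach SImode. */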
2705 static void
2706 optimize_bit_field (body, insn, equiv_mem)
2707 rtx body;
2708 rtx insn;
2709 rtx *equiv_mem;
2711 rtx bitfield;
2712 int destflag;
2713 rtx seq = 0;
2714 enum machine_mode mode;
2716 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2717 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2718 bitfield = SET_DEST (body), destflag = 1;
2719 else
2720 bitfield = SET_SRC (body), destflag = 0;
2722 /* First check that the field being stored has constant size and position
2723 and is in fact a byte or halfword suitably aligned. */
2725 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2726 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2727 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2728 != BLKmode)
2729 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2731 rtx memref = 0;
2733 /* Now check that the containing word is memory, not a register,
2734 and that it is safe to change the machine mode. */
2736 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2737 memref = XEXP (bitfield, 0);
2738 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2739 && equiv_mem != 0)
2740 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2741 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2742 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2743 memref = SUBREG_REG (XEXP (bitfield, 0));
2744 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2745 && equiv_mem != 0
2746 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2747 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2749 if (memref
2750 && ! mode_dependent_address_p (XEXP (memref, 0))
2751 && ! MEM_VOLATILE_P (memref))
2753 /* Now adjust the address, first for any subreg'ing
2754 that we are now getting rid of,
2755 and then for which byte of the word is wanted. */
2757 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2758 rtx insns;
2760 /* Adjust OFFSET to count bits from low-address byte. */
2761 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2762 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2763 - offset - INTVAL (XEXP (bitfield, 1)));
2765 /* Adjust OFFSET to count bytes from low-address byte. */
2766 offset /= BITS_PER_UNIT;
2767 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2769 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2770 / UNITS_PER_WORD) * UNITS_PER_WORD;
2771 if (BYTES_BIG_ENDIAN)
2772 offset -= (MIN (UNITS_PER_WORD,
2773 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2774 - MIN (UNITS_PER_WORD,
2775 GET_MODE_SIZE (GET_MODE (memref))));
2778 start_sequence ();
2779 memref = adjust_address (memref, mode, offset);
2780 insns = get_insns ();
2781 end_sequence ();
2782 emit_insns_before (insns, insn);
2784 /* Store this memory reference where
2785 we found the bit field reference. */
2787 if (destflag)
2789 validate_change (insn, &SET_DEST (body), memref, 1);
2790 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2792 rtx src = SET_SRC (body);
2793 while (GET_CODE (src) == SUBREG
2794 && SUBREG_BYTE (src) == 0)
2795 src = SUBREG_REG (src);
2796 if (GET_MODE (src) != GET_MODE (memref))
2797 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2798 validate_change (insn, &SET_SRC (body), src, 1);
2800 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2801 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2802 /* This shouldn't happen because anything that didn't have
2803 one of these modes should have got converted explicitly
2804 and then referenced through a subreg.
2805 This is so because the original bit-field was
2806 handled by agg_mode and so its tree structure had
2807 the same mode that memref now has. */
2808 abort ();
2810 else
2812 rtx dest = SET_DEST (body);
2814 while (GET_CODE (dest) == SUBREG
2815 && SUBREG_BYTE (dest) == 0
2816 && (GET_MODE_CLASS (GET_MODE (dest))
2817 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2818 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2819 <= UNITS_PER_WORD))
2820 dest = SUBREG_REG (dest);
2822 validate_change (insn, &SET_DEST (body), dest, 1);
2824 if (GET_MODE (dest) == GET_MODE (memref))
2825 validate_change (insn, &SET_SRC (body), memref, 1);
2826 else
2828 /* Convert the mem ref to the destination mode. */
2829 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2831 start_sequence ();
2832 convert_move (newreg, memref,
2833 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2834 seq = get_insns ();
2835 end_sequence ();
2837 validate_change (insn, &SET_SRC (body), newreg, 1);
2841 /* See if we can convert this extraction or insertion into
2842 a simple move insn. We might not be able to do so if this
2843 was, for example, part of a PARALLEL.
2845 If we succeed, write out any needed conversions. If we fail,
2846 it is hard to guess why we failed, so don't do anything
2847 special; just let the optimization be suppressed. */
2849 if (apply_change_group () && seq)
2850 emit_insns_before (seq, insn);
2855 /* These routines are responsible for converting virtual register references
2856 to the actual hard register references once RTL generation is complete.
2858 The following five variables are used for communication between the
2859 routines. They contain the offsets of the virtual registers from their
2860 respective hard registers. */
2862 static int in_arg_offset;
2863 static int var_offset;
2864 static int dynamic_offset;
2865 static int out_arg_offset;
2866 static int cfa_offset;
2868 /* On most machines, the stack pointer register is equivalent to the bottom
2869 of the stack. */
2871 #ifndef STACK_POINTER_OFFSET
2872 #define STACK_POINTER_OFFSET 0
2873 #endif
2875 /* If not defined, pick an appropriate default for the offset of dynamically
2876 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2877 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2879 #ifndef STACK_DYNAMIC_OFFSET
2881 /* The bottom of the stack points to the actual arguments. If
2882 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2883 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2884 stack space for register parameters is not pushed by the caller, but
2885 rather part of the fixed stack areas and hence not included in
2886 `current_function_outgoing_args_size'. Nevertheless, we must allow
2887 for it when allocating stack dynamic objects. */
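/* A worked instance (hypothetical numbers): with ACCUMULATE_OUTGOING_ARGS
   set, 16 bytes of outgoing arguments, a REG_PARM_STACK_SPACE of 24 and a
   STACK_POINTER_OFFSET of 0, dynamic allocations would start 40 bytes
   above the stack pointer. */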
2889 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2890 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2891 ((ACCUMULATE_OUTGOING_ARGS \
2892 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2893 + (STACK_POINTER_OFFSET))
2895 #else
2896 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2897 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2898 + (STACK_POINTER_OFFSET))
2899 #endif
2900 #endif
2902 /* On most machines, the CFA coincides with the first incoming parm. */
2904 #ifndef ARG_POINTER_CFA_OFFSET
2905 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2906 #endif
2908 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had its
2909 address taken. DECL is the decl or SAVE_EXPR for the object stored in the
2910 register, for later use if we do need to force REG into the stack. REG is
2911 overwritten by the MEM as in put_reg_into_stack. */
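/* Schematically, the register rtx REG itself is turned into

       (mem:DECL-MODE (addressof:Pmode (reg:REG-MODE <new pseudo>)
                                       <original REGNO> <decl>))

   so existing references to REG now look like memory references. */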
2913 rtx
2914 gen_mem_addressof (reg, decl)
2915 rtx reg;
2916 tree decl;
2918 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2919 REGNO (reg), decl);
2921 /* Calculate this before we start messing with decl's RTL. */
2922 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2924 /* If the original REG was a user-variable, then so is the REG whose
2925 address is being taken. Likewise for unchanging. */
2926 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2927 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2929 PUT_CODE (reg, MEM);
2930 MEM_ATTRS (reg) = 0;
2931 XEXP (reg, 0) = r;
2933 if (decl)
2935 tree type = TREE_TYPE (decl);
2936 enum machine_mode decl_mode
2937 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2938 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2939 : DECL_RTL_IF_SET (decl));
2941 PUT_MODE (reg, decl_mode);
2943 /* Clear DECL_RTL momentarily so functions below will work
2944 properly, then set it again. */
2945 if (DECL_P (decl) && decl_rtl == reg)
2946 SET_DECL_RTL (decl, 0);
2948 set_mem_attributes (reg, decl, 1);
2949 set_mem_alias_set (reg, set);
2951 if (DECL_P (decl) && decl_rtl == reg)
2952 SET_DECL_RTL (decl, reg);
2954 if (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0))
2955 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2957 else
2958 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2960 return reg;
2963 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2965 void
2966 flush_addressof (decl)
2967 tree decl;
2969 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2970 && DECL_RTL (decl) != 0
2971 && GET_CODE (DECL_RTL (decl)) == MEM
2972 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2973 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2974 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2977 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2979 static void
2980 put_addressof_into_stack (r, ht)
2981 rtx r;
2982 struct hash_table *ht;
2984 tree decl, type;
2985 int volatile_p, used_p;
2987 rtx reg = XEXP (r, 0);
2989 if (GET_CODE (reg) != REG)
2990 abort ();
2992 decl = ADDRESSOF_DECL (r);
2993 if (decl)
2995 type = TREE_TYPE (decl);
2996 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2997 && TREE_THIS_VOLATILE (decl));
2998 used_p = (TREE_USED (decl)
2999 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
3001 else
3003 type = NULL_TREE;
3004 volatile_p = 0;
3005 used_p = 1;
3008 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
3009 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
3012 /* List of replacements made below in purge_addressof_1 when creating
3013 bitfield insertions. */
3014 static rtx purge_bitfield_addressof_replacements;
3016 /* List of replacements made below in purge_addressof_1 for patterns
3017 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3018 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
3019 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3020 enough in complex cases, e.g. when some field values can be
3021 extracted by using a MEM with a narrower mode. */
3022 static rtx purge_addressof_replacements;
3024 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3025 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3026 the stack. If the function returns FALSE then the replacement could not
3027 be made. */
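/* In the common case the collapse is direct (a sketch, assuming a
   32-bit target where Pmode is SImode):

       (mem:SI (addressof:SI (reg:SI 104) 100 <decl>))

   simply becomes (reg:SI 104): the address was taken, but the register
   never actually needed to be forced into memory. */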
3029 static bool
3030 purge_addressof_1 (loc, insn, force, store, ht)
3031 rtx *loc;
3032 rtx insn;
3033 int force, store;
3034 struct hash_table *ht;
3036 rtx x;
3037 RTX_CODE code;
3038 int i, j;
3039 const char *fmt;
3040 bool result = true;
3042 /* Re-start here to avoid recursion in common cases. */
3043 restart:
3045 x = *loc;
3046 if (x == 0)
3047 return true;
3049 code = GET_CODE (x);
3051 /* If we don't return in any of the cases below, we will recurse inside
3052 the RTX, which will normally result in any ADDRESSOF being forced into
3053 memory. */
3054 if (code == SET)
3056 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3057 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3058 return result;
3060 else if (code == ADDRESSOF)
3062 rtx sub, insns;
3064 if (GET_CODE (XEXP (x, 0)) != MEM)
3066 put_addressof_into_stack (x, ht);
3067 return true;
3070 /* We must create a copy of the rtx because it was created by
3071 overwriting a REG rtx which is always shared. */
3072 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3073 if (validate_change (insn, loc, sub, 0)
3074 || validate_replace_rtx (x, sub, insn))
3075 return true;
3077 start_sequence ();
3078 sub = force_operand (sub, NULL_RTX);
3079 if (! validate_change (insn, loc, sub, 0)
3080 && ! validate_replace_rtx (x, sub, insn))
3081 abort ();
3083 insns = gen_sequence ();
3084 end_sequence ();
3085 emit_insn_before (insns, insn);
3086 return true;
3089 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3091 rtx sub = XEXP (XEXP (x, 0), 0);
3093 if (GET_CODE (sub) == MEM)
3094 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3095 else if (GET_CODE (sub) == REG
3096 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3097 ;
3098 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3100 int size_x, size_sub;
3102 if (!insn)
3104 /* When processing REG_NOTES look at the list of
3105 replacements done on the insn to find the register that X
3106 was replaced by. */
3107 rtx tem;
3109 for (tem = purge_bitfield_addressof_replacements;
3110 tem != NULL_RTX;
3111 tem = XEXP (XEXP (tem, 1), 1))
3112 if (rtx_equal_p (x, XEXP (tem, 0)))
3114 *loc = XEXP (XEXP (tem, 1), 0);
3115 return true;
3118 /* See comment for purge_addressof_replacements. */
3119 for (tem = purge_addressof_replacements;
3120 tem != NULL_RTX;
3121 tem = XEXP (XEXP (tem, 1), 1))
3122 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3124 rtx z = XEXP (XEXP (tem, 1), 0);
3126 if (GET_MODE (x) == GET_MODE (z)
3127 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3128 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3129 abort ();
3131 /* It can happen that the note may speak of things
3132 in a wider (or just different) mode than the
3133 code did. This is especially true of
3134 REG_RETVAL. */
3136 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3137 z = SUBREG_REG (z);
3139 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3140 && (GET_MODE_SIZE (GET_MODE (x))
3141 > GET_MODE_SIZE (GET_MODE (z))))
3143 /* This can occur as a result of invalid
3144 pointer casts, e.g. float f; ...
3145 *(long long int *)&f.
3146 ??? We could emit a warning here, but
3147 without a line number that wouldn't be
3148 very helpful. */
3149 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3151 else
3152 z = gen_lowpart (GET_MODE (x), z);
3154 *loc = z;
3155 return true;
3158 /* Sometimes we may not be able to find the replacement. For
3159 example when the original insn was a MEM in a wider mode,
3160 and the note is part of a sign extension of a narrowed
3161 version of that MEM. Gcc testcase compile/990829-1.c can
3162 generate an example of this situation. Rather than complain
3163 we return false, which will prompt our caller to remove the
3164 offending note. */
3165 return false;
3168 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3169 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3171 /* Don't even consider working with paradoxical subregs,
3172 or the moral equivalent seen here. */
3173 if (size_x <= size_sub
3174 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3176 /* Do a bitfield insertion to mirror what would happen
3177 in memory. */
3179 rtx val, seq;
3181 if (store)
3183 rtx p = PREV_INSN (insn);
3185 start_sequence ();
3186 val = gen_reg_rtx (GET_MODE (x));
3187 if (! validate_change (insn, loc, val, 0))
3189 /* Discard the current sequence and put the
3190 ADDRESSOF on stack. */
3191 end_sequence ();
3192 goto give_up;
3194 seq = gen_sequence ();
3195 end_sequence ();
3196 emit_insn_before (seq, insn);
3197 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3198 insn, ht);
3200 start_sequence ();
3201 store_bit_field (sub, size_x, 0, GET_MODE (x),
3202 val, GET_MODE_SIZE (GET_MODE (sub)));
3204 /* Make sure to unshare any shared rtl that store_bit_field
3205 might have created. */
3206 unshare_all_rtl_again (get_insns ());
3208 seq = gen_sequence ();
3209 end_sequence ();
3210 p = emit_insn_after (seq, insn);
3211 if (NEXT_INSN (insn))
3212 compute_insns_for_mem (NEXT_INSN (insn),
3213 p ? NEXT_INSN (p) : NULL_RTX,
3214 ht);
3216 else
3218 rtx p = PREV_INSN (insn);
3220 start_sequence ();
3221 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3222 GET_MODE (x), GET_MODE (x),
3223 GET_MODE_SIZE (GET_MODE (sub)));
3225 if (! validate_change (insn, loc, val, 0))
3227 /* Discard the current sequence and put the
3228 ADDRESSOF on stack. */
3229 end_sequence ();
3230 goto give_up;
3233 seq = gen_sequence ();
3234 end_sequence ();
3235 emit_insn_before (seq, insn);
3236 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3237 insn, ht);
3240 /* Remember the replacement so that the same one can be done
3241 on the REG_NOTES. */
3242 purge_bitfield_addressof_replacements
3243 = gen_rtx_EXPR_LIST (VOIDmode, x,
3244 gen_rtx_EXPR_LIST
3245 (VOIDmode, val,
3246 purge_bitfield_addressof_replacements));
3248 /* We replaced with a reg -- all done. */
3249 return true;
3253 else if (validate_change (insn, loc, sub, 0))
3255 /* Remember the replacement so that the same one can be done
3256 on the REG_NOTES. */
3257 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3259 rtx tem;
3261 for (tem = purge_addressof_replacements;
3262 tem != NULL_RTX;
3263 tem = XEXP (XEXP (tem, 1), 1))
3264 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3266 XEXP (XEXP (tem, 1), 0) = sub;
3267 return true;
3269 purge_addressof_replacements
3270 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3271 gen_rtx_EXPR_LIST (VOIDmode, sub,
3272 purge_addressof_replacements));
3273 return true;
3275 goto restart;
3279 give_up:
3280 /* Scan all subexpressions. */
3281 fmt = GET_RTX_FORMAT (code);
3282 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3284 if (*fmt == 'e')
3285 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3286 else if (*fmt == 'E')
3287 for (j = 0; j < XVECLEN (x, i); j++)
3288 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3291 return result;
3294 /* Return a new hash table entry in HT. */
3296 static struct hash_entry *
3297 insns_for_mem_newfunc (he, ht, k)
3298 struct hash_entry *he;
3299 struct hash_table *ht;
3300 hash_table_key k ATTRIBUTE_UNUSED;
3302 struct insns_for_mem_entry *ifmhe;
3303 if (he)
3304 return he;
3306 ifmhe = ((struct insns_for_mem_entry *)
3307 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3308 ifmhe->insns = NULL_RTX;
3310 return &ifmhe->he;
3313 /* Return a hash value for K, a REG. */
3315 static unsigned long
3316 insns_for_mem_hash (k)
3317 hash_table_key k;
3319 /* K is really an RTX. Just use its address as the hash value. */
3320 return (unsigned long) k;
3323 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3325 static bool
3326 insns_for_mem_comp (k1, k2)
3327 hash_table_key k1;
3328 hash_table_key k2;
3330 return k1 == k2;
3333 struct insns_for_mem_walk_info
3335 /* The hash table that we are using to record which INSNs use which
3336 MEMs. */
3337 struct hash_table *ht;
3339 /* The INSN we are currently processing. */
3340 rtx insn;
3342 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3343 to find the insns that use the REGs in the ADDRESSOFs. */
3344 int pass;
3347 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3348 that might be used in an ADDRESSOF expression, record this INSN in
3349 the hash table given by DATA (which is really a pointer to an
3350 insns_for_mem_walk_info structure). */
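/* A minimal sketch of the driving loop (see compute_insns_for_mem
   below): pass 0 seeds the table with every REG found inside an
   ADDRESSOF, and pass 1 then records, for each such REG, the insns
   that mention it:

       for (pass = 0; pass < 2; pass++)
         for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
           if (INSN_P (insn))
             for_each_rtx (&insn, insns_for_mem_walk, &info);

   Stray REGs never allocate entries, because pass 1 looks up with
   create == 0. */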
3352 static int
3353 insns_for_mem_walk (r, data)
3354 rtx *r;
3355 void *data;
3357 struct insns_for_mem_walk_info *ifmwi
3358 = (struct insns_for_mem_walk_info *) data;
3360 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3361 && GET_CODE (XEXP (*r, 0)) == REG)
3362 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3363 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3365 /* Look up this REG in the hash table; pass 0 created an entry only if the REG appeared inside an ADDRESSOF. */
3366 struct insns_for_mem_entry *ifme
3367 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3368 *r,
3369 /*create=*/0,
3370 /*copy=*/0);
3372 /* If we have not already recorded this INSN, do so now. Since
3373 we process the INSNs in order, we know that if we have
3374 recorded it, it must be at the front of the list. */
3375 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3376 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3377 ifme->insns);
3380 return 0;
3383 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3384 which REGs in HT. */
3386 static void
3387 compute_insns_for_mem (insns, last_insn, ht)
3388 rtx insns;
3389 rtx last_insn;
3390 struct hash_table *ht;
3392 rtx insn;
3393 struct insns_for_mem_walk_info ifmwi;
3394 ifmwi.ht = ht;
3396 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3397 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3398 if (INSN_P (insn))
3400 ifmwi.insn = insn;
3401 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3405 /* Helper function for purge_addressof called through for_each_rtx.
3406 Returns true iff the rtl is an ADDRESSOF. */
3408 static int
3409 is_addressof (rtl, data)
3410 rtx *rtl;
3411 void *data ATTRIBUTE_UNUSED;
3413 return GET_CODE (*rtl) == ADDRESSOF;
3416 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3417 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3418 stack. */
3420 void
3421 purge_addressof (insns)
3422 rtx insns;
3424 rtx insn;
3425 struct hash_table ht;
3427 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3428 requires a fixup pass over the instruction stream to correct
3429 INSNs that depended on the REG being a REG, and not a MEM. But,
3430 these fixup passes are slow. Furthermore, most MEMs are not
3431 mentioned in very many instructions. So, we speed up the process
3432 by pre-calculating which REGs occur in which INSNs; that allows
3433 us to perform the fixup passes much more quickly. */
3434 hash_table_init (&ht,
3435 insns_for_mem_newfunc,
3436 insns_for_mem_hash,
3437 insns_for_mem_comp);
3438 compute_insns_for_mem (insns, NULL_RTX, &ht);
3440 for (insn = insns; insn; insn = NEXT_INSN (insn))
3441 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3442 || GET_CODE (insn) == CALL_INSN)
3444 if (! purge_addressof_1 (&PATTERN (insn), insn,
3445 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3446 /* If we could not replace the ADDRESSOFs in the insn,
3447 something is wrong. */
3448 abort ();
3450 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3452 /* If we could not replace the ADDRESSOFs in the insn's notes,
3453 we can just remove the offending notes instead. */
3454 rtx note;
3456 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3458 /* If we find a REG_RETVAL note then the insn is a libcall.
3459 Such insns must have REG_EQUAL notes as well, in order
3460 for later passes of the compiler to work. So it is not
3461 safe to delete the notes here, and instead we abort. */
3462 if (REG_NOTE_KIND (note) == REG_RETVAL)
3463 abort ();
3464 if (for_each_rtx (&note, is_addressof, NULL))
3465 remove_note (insn, note);
3470 /* Clean up. */
3471 hash_table_free (&ht);
3472 purge_bitfield_addressof_replacements = 0;
3473 purge_addressof_replacements = 0;
3475 /* REGs are shared. purge_addressof will destructively replace a REG
3476 with a MEM, which creates shared MEMs.
3478 Unfortunately, the children of put_reg_into_stack assume that MEMs
3479 referring to the same stack slot are shared (fixup_var_refs and
3480 the associated hash table code).
3482 So, we have to do another unsharing pass after we have flushed any
3483 REGs that had their address taken into the stack.
3485 It may be worth tracking whether or not we converted any REGs into
3486 MEMs to avoid this overhead when it is not needed. */
3487 unshare_all_rtl_again (get_insns ());
3490 /* Convert a SET of a hard subreg to a set of the appropriate hard
3491 register. A subroutine of purge_hard_subreg_sets. */
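/* Sketch (hypothetical hard registers, assuming 32-bit words and
   little-endian word order):

       (set (subreg:SI (reg:DI 0) 4) (reg:SI 42))

   has a subreg_regno_offset of 1, so it is rewritten as

       (set (reg:SI 1) (reg:SI 42)). */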
3493 static void
3494 purge_single_hard_subreg_set (pattern)
3495 rtx pattern;
3497 rtx reg = SET_DEST (pattern);
3498 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3499 int offset = 0;
3501 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3502 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3504 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3505 GET_MODE (SUBREG_REG (reg)),
3506 SUBREG_BYTE (reg),
3507 GET_MODE (reg));
3508 reg = SUBREG_REG (reg);
3512 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3514 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3515 SET_DEST (pattern) = reg;
3519 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3520 only such SETs that we expect to see are those left in because
3521 integrate can't handle sets of parts of a return value register.
3523 We don't use alter_subreg because we only want to eliminate subregs
3524 of hard registers. */
3526 void
3527 purge_hard_subreg_sets (insn)
3528 rtx insn;
3530 for (; insn; insn = NEXT_INSN (insn))
3532 if (INSN_P (insn))
3534 rtx pattern = PATTERN (insn);
3535 switch (GET_CODE (pattern))
3537 case SET:
3538 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3539 purge_single_hard_subreg_set (pattern);
3540 break;
3541 case PARALLEL:
3543 int j;
3544 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3546 rtx inner_pattern = XVECEXP (pattern, 0, j);
3547 if (GET_CODE (inner_pattern) == SET
3548 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3549 purge_single_hard_subreg_set (inner_pattern);
3552 break;
3553 default:
3554 break;
3560 /* Pass through the INSNS of function FNDECL and convert virtual register
3561 references to hard register references. */
3563 void
3564 instantiate_virtual_regs (fndecl, insns)
3565 tree fndecl;
3566 rtx insns;
3568 rtx insn;
3569 unsigned int i;
3571 /* Compute the offsets to use for this function. */
3572 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3573 var_offset = STARTING_FRAME_OFFSET;
3574 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3575 out_arg_offset = STACK_POINTER_OFFSET;
3576 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3578 /* Scan all variables and parameters of this function. For each that is
3579 in memory, instantiate all virtual registers if the result is a valid
3580 address. If not, we do it later. That will handle most uses of virtual
3581 regs on many machines. */
3582 instantiate_decls (fndecl, 1);
3584 /* Initialize recognition, indicating that volatile is OK. */
3585 init_recog ();
3587 /* Scan through all the insns, instantiating every virtual register still
3588 present. */
3589 for (insn = insns; insn; insn = NEXT_INSN (insn))
3590 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3591 || GET_CODE (insn) == CALL_INSN)
3593 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3594 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3595 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3596 if (GET_CODE (insn) == CALL_INSN)
3597 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3598 NULL_RTX, 0);
3601 /* Instantiate the stack slots for the parm registers, for later use in
3602 addressof elimination. */
3603 for (i = 0; i < max_parm_reg; ++i)
3604 if (parm_reg_stack_loc[i])
3605 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3607 /* Now instantiate the remaining register equivalences for debugging info.
3608 These will not be valid addresses. */
3609 instantiate_decls (fndecl, 0);
3611 /* Indicate that, from now on, assign_stack_local should use
3612 frame_pointer_rtx. */
3613 virtuals_instantiated = 1;
3616 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3617 all virtual registers in their DECL_RTL's.
3619 If VALID_ONLY, do this only if the resulting address is still valid.
3620 Otherwise, always do it. */
3622 static void
3623 instantiate_decls (fndecl, valid_only)
3624 tree fndecl;
3625 int valid_only;
3627 tree decl;
3629 /* Process all parameters of the function. */
3630 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3632 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3633 HOST_WIDE_INT size_rtl;
3635 instantiate_decl (DECL_RTL (decl), size, valid_only);
3637 /* If the parameter was promoted, then the incoming RTL mode may be
3638 larger than the declared type size. We must use the larger of
3639 the two sizes. */
3640 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3641 size = MAX (size_rtl, size);
3642 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3645 /* Now process all variables defined in the function or its subblocks. */
3646 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3649 /* Subroutine of instantiate_decls: Process all decls in the given
3650 BLOCK node and all its subblocks. */
3652 static void
3653 instantiate_decls_1 (let, valid_only)
3654 tree let;
3655 int valid_only;
3657 tree t;
3659 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3660 if (DECL_RTL_SET_P (t))
3661 instantiate_decl (DECL_RTL (t),
3662 int_size_in_bytes (TREE_TYPE (t)),
3663 valid_only);
3665 /* Process all subblocks. */
3666 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3667 instantiate_decls_1 (t, valid_only);
3670 /* Subroutine of the preceding procedures: Given RTL representing a
3671 decl and the size of the object, do any instantiation required.
3673 If VALID_ONLY is non-zero, it means that the RTL should only be
3674 changed if the new address is valid. */
3676 static void
3677 instantiate_decl (x, size, valid_only)
3678 rtx x;
3679 HOST_WIDE_INT size;
3680 int valid_only;
3682 enum machine_mode mode;
3683 rtx addr;
3685 /* If this is not a MEM, no need to do anything. Similarly if the
3686 address is a constant or a register that is not a virtual register. */
3688 if (x == 0 || GET_CODE (x) != MEM)
3689 return;
3691 addr = XEXP (x, 0);
3692 if (CONSTANT_P (addr)
3693 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3694 || (GET_CODE (addr) == REG
3695 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3696 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3697 return;
3699 /* If we should only do this if the address is valid, copy the address.
3700 We need to do this so we can undo any changes that might make the
3701 address invalid. This copy is unfortunate, but probably can't be
3702 avoided. */
3704 if (valid_only)
3705 addr = copy_rtx (addr);
3707 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3709 if (valid_only && size >= 0)
3711 unsigned HOST_WIDE_INT decl_size = size;
3713 /* Now verify that the resulting address is valid for every integer or
3714 floating-point mode up to and including SIZE bytes long. We do this
3715 since the object might be accessed in any mode and frame addresses
3716 are shared. */
3718 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3719 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3720 mode = GET_MODE_WIDER_MODE (mode))
3721 if (! memory_address_p (mode, addr))
3722 return;
3724 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3725 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3726 mode = GET_MODE_WIDER_MODE (mode))
3727 if (! memory_address_p (mode, addr))
3728 return;
3731 /* Put back the address now that we have updated it and we either know
3732 it is valid or we don't care whether it is valid. */
3734 XEXP (x, 0) = addr;
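/* For illustration (hypothetical figures, assuming a typical 32-bit
   target): if SIZE is 8, the loops above check the instantiated address
   with memory_address_p in QImode, HImode, SImode and DImode, then in
   SFmode and DFmode; if any check fails, the function returns early and
   the original address is left untouched. */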
3737 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3738 is a virtual register, return the equivalent hard register and set the
3739 offset indirectly through the pointer. Otherwise, return 0. */
3741 static rtx
3742 instantiate_new_reg (x, poffset)
3743 rtx x;
3744 HOST_WIDE_INT *poffset;
3746 rtx new;
3747 HOST_WIDE_INT offset;
3749 if (x == virtual_incoming_args_rtx)
3750 new = arg_pointer_rtx, offset = in_arg_offset;
3751 else if (x == virtual_stack_vars_rtx)
3752 new = frame_pointer_rtx, offset = var_offset;
3753 else if (x == virtual_stack_dynamic_rtx)
3754 new = stack_pointer_rtx, offset = dynamic_offset;
3755 else if (x == virtual_outgoing_args_rtx)
3756 new = stack_pointer_rtx, offset = out_arg_offset;
3757 else if (x == virtual_cfa_rtx)
3758 new = arg_pointer_rtx, offset = cfa_offset;
3759 else
3760 return 0;
3762 *poffset = offset;
3763 return new;
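/* Usage sketch (hypothetical, mirroring the REG case handled further
   down in instantiate_virtual_regs_1):

   HOST_WIDE_INT off;
   rtx hard = instantiate_new_reg (x, &off);
   if (hard != 0)
   x = plus_constant (hard, off);

   i.e. a lone virtual register becomes its hard register plus the
   accumulated offset. */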
3766 /* Given a pointer to a piece of rtx and an optional pointer to the
3767 containing object, instantiate any virtual registers present in it.
3769 If EXTRA_INSNS, we always do the replacement and generate
3770 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3771 is not valid.
3773 Return 1 if we either had nothing to do or if we were able to do the
3774 needed replacement. Return 0 otherwise; we only return zero if
3775 EXTRA_INSNS is zero.
3777 We first try some simple transformations to avoid the creation of extra
3778 pseudos. */
3780 static int
3781 instantiate_virtual_regs_1 (loc, object, extra_insns)
3782 rtx *loc;
3783 rtx object;
3784 int extra_insns;
3786 rtx x;
3787 RTX_CODE code;
3788 rtx new = 0;
3789 HOST_WIDE_INT offset = 0;
3790 rtx temp;
3791 rtx seq;
3792 int i, j;
3793 const char *fmt;
3795 /* Re-start here to avoid recursion in common cases. */
3796 restart:
3798 x = *loc;
3799 if (x == 0)
3800 return 1;
3802 code = GET_CODE (x);
3804 /* Check for some special cases. */
3805 switch (code)
3807 case CONST_INT:
3808 case CONST_DOUBLE:
3809 case CONST_VECTOR:
3810 case CONST:
3811 case SYMBOL_REF:
3812 case CODE_LABEL:
3813 case PC:
3814 case CC0:
3815 case ASM_INPUT:
3816 case ADDR_VEC:
3817 case ADDR_DIFF_VEC:
3818 case RETURN:
3819 return 1;
3821 case SET:
3822 /* We are allowed to set the virtual registers. This means that
3823 the actual register should receive the source minus the
3824 appropriate offset. This is used, for example, in the handling
3825 of non-local gotos. */
3826 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3828 rtx src = SET_SRC (x);
3830 /* We are setting the register, not using it, so the relevant
3831 offset is the negative of the offset to use were we using
3832 the register. */
3833 offset = - offset;
3834 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3836 /* The only valid sources here are PLUS or REG. Just do
3837 the simplest possible thing to handle them. */
3838 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3839 abort ();
3841 start_sequence ();
3842 if (GET_CODE (src) != REG)
3843 temp = force_operand (src, NULL_RTX);
3844 else
3845 temp = src;
3846 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3847 seq = get_insns ();
3848 end_sequence ();
3850 emit_insns_before (seq, object);
3851 SET_DEST (x) = new;
3853 if (! validate_change (object, &SET_SRC (x), temp, 0)
3854 || ! extra_insns)
3855 abort ();
3857 return 1;
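/* Example of the SET case above (hypothetical offset): with
   dynamic_offset == 32, the assignment
   (set (reg virtual-stack-dynamic) (reg tmp))
   is rewritten so the hard register receives the source minus the offset,
   (set (reg stack-pointer) (plus (reg tmp) (const_int -32)))
   so that a later use of the virtual register, which adds the +32 back,
   sees the intended value. */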
3860 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3861 loc = &SET_SRC (x);
3862 goto restart;
3864 case PLUS:
3865 /* Handle special case of virtual register plus constant. */
3866 if (CONSTANT_P (XEXP (x, 1)))
3868 rtx old, new_offset;
3870 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3871 if (GET_CODE (XEXP (x, 0)) == PLUS)
3873 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3875 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3876 extra_insns);
3877 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3879 else
3881 loc = &XEXP (x, 0);
3882 goto restart;
3886 #ifdef POINTERS_EXTEND_UNSIGNED
3887 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3888 we can commute the PLUS and SUBREG because pointers into the
3889 frame are well-behaved. */
3890 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3891 && GET_CODE (XEXP (x, 1)) == CONST_INT
3892 && 0 != (new
3893 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3894 &offset))
3895 && validate_change (object, loc,
3896 plus_constant (gen_lowpart (ptr_mode,
3897 new),
3898 offset
3899 + INTVAL (XEXP (x, 1))),
3900 0))
3901 return 1;
3902 #endif
3903 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3905 /* We know the second operand is a constant. Unless the
3906 first operand is a REG (which has already been checked),
3907 it needs to be checked. */
3908 if (GET_CODE (XEXP (x, 0)) != REG)
3910 loc = &XEXP (x, 0);
3911 goto restart;
3913 return 1;
3916 new_offset = plus_constant (XEXP (x, 1), offset);
3918 /* If the new constant is zero, try to replace the sum with just
3919 the register. */
3920 if (new_offset == const0_rtx
3921 && validate_change (object, loc, new, 0))
3922 return 1;
3924 /* Next try to replace the register and the new offset.
3925 There are two changes to validate here, and we can't assume that,
3926 when the old offset equals the new one, just changing the register
3927 will yield a valid insn. In the interests of a little efficiency,
3928 however, we only call validate_change once (we don't queue up the
3929 changes and then call apply_change_group). */
3931 old = XEXP (x, 0);
3932 if (offset == 0
3933 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3934 : (XEXP (x, 0) = new,
3935 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3937 if (! extra_insns)
3939 XEXP (x, 0) = old;
3940 return 0;
3943 /* Otherwise copy the new constant into a register and replace
3944 constant with that register. */
3945 temp = gen_reg_rtx (Pmode);
3946 XEXP (x, 0) = new;
3947 if (validate_change (object, &XEXP (x, 1), temp, 0))
3948 emit_insn_before (gen_move_insn (temp, new_offset), object);
3949 else
3951 /* If that didn't work, replace this expression with a
3952 register containing the sum. */
3954 XEXP (x, 0) = old;
3955 new = gen_rtx_PLUS (Pmode, new, new_offset);
3957 start_sequence ();
3958 temp = force_operand (new, NULL_RTX);
3959 seq = get_insns ();
3960 end_sequence ();
3962 emit_insns_before (seq, object);
3963 if (! validate_change (object, loc, temp, 0)
3964 && ! validate_replace_rtx (x, temp, object))
3965 abort ();
3969 return 1;
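/* Example of the PLUS case (hypothetical offset): with var_offset == -16,
   (plus (reg virtual-stack-vars) (const_int 4))
   first becomes the frame pointer plus the folded constant -12; if
   validate_change accepts neither that form nor a register holding the
   new offset, the whole sum is computed into a fresh pseudo before the
   insn and that pseudo is substituted instead. */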
3972 /* Fall through to generic two-operand expression case. */
3973 case EXPR_LIST:
3974 case CALL:
3975 case COMPARE:
3976 case MINUS:
3977 case MULT:
3978 case DIV: case UDIV:
3979 case MOD: case UMOD:
3980 case AND: case IOR: case XOR:
3981 case ROTATERT: case ROTATE:
3982 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3983 case NE: case EQ:
3984 case GE: case GT: case GEU: case GTU:
3985 case LE: case LT: case LEU: case LTU:
3986 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3987 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3988 loc = &XEXP (x, 0);
3989 goto restart;
3991 case MEM:
3992 /* Most cases of MEM that convert to valid addresses have already been
3993 handled by our scan of decls. The only special handling we
3994 need here is to make a copy of the rtx to ensure it isn't being
3995 shared if we have to change it to a pseudo.
3997 If the rtx is a simple reference to an address via a virtual register,
3998 it can potentially be shared. In such cases, first try to make it
3999 a valid address, which can also be shared. Otherwise, copy it and
4000 proceed normally.
4002 First check for common cases that need no processing. These are
4003 usually due to instantiation already being done on a previous instance
4004 of a shared rtx. */
4006 temp = XEXP (x, 0);
4007 if (CONSTANT_ADDRESS_P (temp)
4008 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4009 || temp == arg_pointer_rtx
4010 #endif
4011 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4012 || temp == hard_frame_pointer_rtx
4013 #endif
4014 || temp == frame_pointer_rtx)
4015 return 1;
4017 if (GET_CODE (temp) == PLUS
4018 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4019 && (XEXP (temp, 0) == frame_pointer_rtx
4020 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4021 || XEXP (temp, 0) == hard_frame_pointer_rtx
4022 #endif
4023 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4024 || XEXP (temp, 0) == arg_pointer_rtx
4025 #endif
4026 ))
4027 return 1;
4029 if (temp == virtual_stack_vars_rtx
4030 || temp == virtual_incoming_args_rtx
4031 || (GET_CODE (temp) == PLUS
4032 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4033 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4034 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4036 /* This MEM may be shared. If the substitution can be done without
4037 the need to generate new pseudos, we want to do it in place
4038 so all copies of the shared rtx benefit. The call below will
4039 only make substitutions if the resulting address is still
4040 valid.
4042 Note that we cannot pass X as the object in the recursive call
4043 since the insn being processed may not allow all valid
4044 addresses. However, if we were not passed an object, we can
4045 only modify X without copying it if X will have a valid
4046 address.
4048 ??? Also note that this can still lose if OBJECT is an insn that
4049 has fewer restrictions on an address than some other insn.
4050 In that case, we will modify the shared address. This case
4051 doesn't seem very likely, though. One case where this could
4052 happen is in the case of a USE or CLOBBER reference, but we
4053 take care of that below. */
4055 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4056 object ? object : x, 0))
4057 return 1;
4059 /* Otherwise make a copy and process that copy. We copy the entire
4060 RTL expression since it might be a PLUS which could also be
4061 shared. */
4062 *loc = x = copy_rtx (x);
4065 /* Fall through to generic unary operation case. */
4066 case PREFETCH:
4067 case SUBREG:
4068 case STRICT_LOW_PART:
4069 case NEG: case NOT:
4070 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4071 case SIGN_EXTEND: case ZERO_EXTEND:
4072 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4073 case FLOAT: case FIX:
4074 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4075 case ABS:
4076 case SQRT:
4077 case FFS:
4078 /* These cases either have just one operand, or we know that we need not
4079 check the rest of the operands. */
4080 loc = &XEXP (x, 0);
4081 goto restart;
4083 case USE:
4084 case CLOBBER:
4085 /* If the operand is a MEM, see if the change results in a valid MEM. If not,
4086 go ahead and make the invalid change, but do it to a copy. For a REG,
4087 just make the recursive call, since there's no chance of a problem. */
4089 if ((GET_CODE (XEXP (x, 0)) == MEM
4090 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4091 0))
4092 || (GET_CODE (XEXP (x, 0)) == REG
4093 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4094 return 1;
4096 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4097 loc = &XEXP (x, 0);
4098 goto restart;
4100 case REG:
4101 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4102 in front of this insn and substitute the temporary. */
4103 if ((new = instantiate_new_reg (x, &offset)) != 0)
4105 temp = plus_constant (new, offset);
4106 if (!validate_change (object, loc, temp, 0))
4108 if (! extra_insns)
4109 return 0;
4111 start_sequence ();
4112 temp = force_operand (temp, NULL_RTX);
4113 seq = get_insns ();
4114 end_sequence ();
4116 emit_insns_before (seq, object);
4117 if (! validate_change (object, loc, temp, 0)
4118 && ! validate_replace_rtx (x, temp, object))
4119 abort ();
4123 return 1;
4125 case ADDRESSOF:
4126 if (GET_CODE (XEXP (x, 0)) == REG)
4127 return 1;
4129 else if (GET_CODE (XEXP (x, 0)) == MEM)
4131 /* If we have a (addressof (mem ..)), do any instantiation inside
4132 since we know we'll be making the inside valid when we finally
4133 remove the ADDRESSOF. */
4134 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4135 return 1;
4137 break;
4139 default:
4140 break;
4143 /* Scan all subexpressions. */
4144 fmt = GET_RTX_FORMAT (code);
4145 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4146 if (*fmt == 'e')
4148 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4149 return 0;
4151 else if (*fmt == 'E')
4152 for (j = 0; j < XVECLEN (x, i); j++)
4153 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4154 extra_insns))
4155 return 0;
4157 return 1;
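/* The generic walk above is driven by the RTL format strings: for
   example, GET_RTX_FORMAT for a PLUS is "ee", so both operands are
   visited, while vector operands ('E'), such as the body of a PARALLEL,
   are scanned element by element. */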
4160 /* Optimization: assuming this function does not receive nonlocal gotos,
4161 delete the handlers for such, as well as the insns to establish
4162 and disestablish them. */
4164 static void
4165 delete_handlers ()
4167 rtx insn;
4168 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4170 /* Delete the handler by turning off the flag that would
4171 prevent jump_optimize from deleting it.
4172 Also permit deletion of the nonlocal labels themselves
4173 if nothing local refers to them. */
4174 if (GET_CODE (insn) == CODE_LABEL)
4176 tree t, last_t;
4178 LABEL_PRESERVE_P (insn) = 0;
4180 /* Remove it from the nonlocal_label list, to avoid confusing
4181 flow. */
4182 for (t = nonlocal_labels, last_t = 0; t;
4183 last_t = t, t = TREE_CHAIN (t))
4184 if (DECL_RTL (TREE_VALUE (t)) == insn)
4185 break;
4186 if (t)
4188 if (! last_t)
4189 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4190 else
4191 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4194 if (GET_CODE (insn) == INSN)
4196 int can_delete = 0;
4197 rtx t;
4198 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4199 if (reg_mentioned_p (t, PATTERN (insn)))
4201 can_delete = 1;
4202 break;
4204 if (can_delete
4205 || (nonlocal_goto_stack_level != 0
4206 && reg_mentioned_p (nonlocal_goto_stack_level,
4207 PATTERN (insn))))
4208 delete_related_insns (insn);
4213 int
4214 max_parm_reg_num ()
4216 return max_parm_reg;
4219 /* Return the first insn following those generated by `assign_parms'. */
4221 rtx
4222 get_first_nonparm_insn ()
4224 if (last_parm_insn)
4225 return NEXT_INSN (last_parm_insn);
4226 return get_insns ();
4229 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4230 Crash if there is none. */
4232 rtx
4233 get_first_block_beg ()
4235 rtx searcher;
4236 rtx insn = get_first_nonparm_insn ();
4238 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4239 if (GET_CODE (searcher) == NOTE
4240 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4241 return searcher;
4243 abort (); /* Invalid call to this function. (See comments above.) */
4244 return NULL_RTX;
4247 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4248 This means a type for which function calls must pass an address to the
4249 function or get an address back from the function.
4250 EXP may be a type node or an expression (whose type is tested). */
4252 int
4253 aggregate_value_p (exp)
4254 tree exp;
4256 int i, regno, nregs;
4257 rtx reg;
4259 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4261 if (TREE_CODE (type) == VOID_TYPE)
4262 return 0;
4263 if (RETURN_IN_MEMORY (type))
4264 return 1;
4265 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4266 and thus can't be returned in registers. */
4267 if (TREE_ADDRESSABLE (type))
4268 return 1;
4269 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4270 return 1;
4271 /* Make sure we have suitable call-clobbered regs to return
4272 the value in; if not, we must return it in memory. */
4273 reg = hard_function_value (type, 0, 0);
4275 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4276 it is OK. */
4277 if (GET_CODE (reg) != REG)
4278 return 0;
4280 regno = REGNO (reg);
4281 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4282 for (i = 0; i < nregs; i++)
4283 if (! call_used_regs[regno + i])
4284 return 1;
4285 return 0;
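/* Illustrative outcomes (target-dependent, so typical rather than
   guaranteed): a plain `int' normally yields 0 here, since it fits in a
   call-clobbered return register, while a struct with TREE_ADDRESSABLE
   set, or any aggregate under -fpcc-struct-return, yields 1 and is
   returned in memory. */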
4288 /* Assign RTL expressions to the function's parameters.
4289 This may involve copying them into registers and using
4290 those registers as the RTL for them. */
4292 void
4293 assign_parms (fndecl)
4294 tree fndecl;
4296 tree parm;
4297 rtx entry_parm = 0;
4298 rtx stack_parm = 0;
4299 CUMULATIVE_ARGS args_so_far;
4300 enum machine_mode promoted_mode, passed_mode;
4301 enum machine_mode nominal_mode, promoted_nominal_mode;
4302 int unsignedp;
4303 /* Total space needed so far for args on the stack,
4304 given as a constant and a tree-expression. */
4305 struct args_size stack_args_size;
4306 tree fntype = TREE_TYPE (fndecl);
4307 tree fnargs = DECL_ARGUMENTS (fndecl);
4308 /* This is used for the arg pointer when referring to stack args. */
4309 rtx internal_arg_pointer;
4310 /* This is a dummy PARM_DECL that we used for the function result if
4311 the function returns a structure. */
4312 tree function_result_decl = 0;
4313 #ifdef SETUP_INCOMING_VARARGS
4314 int varargs_setup = 0;
4315 #endif
4316 rtx conversion_insns = 0;
4317 struct args_size alignment_pad;
4319 /* Nonzero if the last arg is named `__builtin_va_alist',
4320 which is used on some machines for old-fashioned non-ANSI varargs.h;
4321 this should be stuck onto the stack as if it had arrived there. */
4322 int hide_last_arg
4323 = (current_function_varargs
4324 && fnargs
4325 && (parm = tree_last (fnargs)) != 0
4326 && DECL_NAME (parm)
4327 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4328 "__builtin_va_alist")));
4330 /* Nonzero if function takes extra anonymous args.
4331 This means the last named arg must be on the stack
4332 right before the anonymous ones. */
4333 int stdarg
4334 = (TYPE_ARG_TYPES (fntype) != 0
4335 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4336 != void_type_node));
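/* For example, for `int f (int a, ...)' the last element of
   TYPE_ARG_TYPES is not void_type_node, so STDARG is 1; for the
   prototype `int g (int a)' the argument list is terminated by
   void_type_node and STDARG is 0. */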
4338 current_function_stdarg = stdarg;
4340 /* If the reg that the virtual arg pointer will be translated into is
4341 not a fixed reg or is the stack pointer, make a copy of the virtual
4342 arg pointer, and address parms via the copy. The frame pointer is
4343 considered fixed even though it is not marked as such.
4345 The second time through, simply use ap to avoid generating rtx. */
4347 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4348 || ! (fixed_regs[ARG_POINTER_REGNUM]
4349 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4350 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4351 else
4352 internal_arg_pointer = virtual_incoming_args_rtx;
4353 current_function_internal_arg_pointer = internal_arg_pointer;
4355 stack_args_size.constant = 0;
4356 stack_args_size.var = 0;
4358 /* If struct value address is treated as the first argument, make it so. */
4359 if (aggregate_value_p (DECL_RESULT (fndecl))
4360 && ! current_function_returns_pcc_struct
4361 && struct_value_incoming_rtx == 0)
4363 tree type = build_pointer_type (TREE_TYPE (fntype));
4365 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4367 DECL_ARG_TYPE (function_result_decl) = type;
4368 TREE_CHAIN (function_result_decl) = fnargs;
4369 fnargs = function_result_decl;
4372 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4373 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4375 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4376 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4377 #else
4378 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4379 #endif
4381 /* We haven't yet found an argument that we must push and pretend the
4382 caller did. */
4383 current_function_pretend_args_size = 0;
4385 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4387 struct args_size stack_offset;
4388 struct args_size arg_size;
4389 int passed_pointer = 0;
4390 int did_conversion = 0;
4391 tree passed_type = DECL_ARG_TYPE (parm);
4392 tree nominal_type = TREE_TYPE (parm);
4393 int pretend_named;
4394 int last_named = 0, named_arg;
4396 /* Set LAST_NAMED if this is last named arg before last
4397 anonymous args. */
4398 if (stdarg || current_function_varargs)
4400 tree tem;
4402 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4403 if (DECL_NAME (tem))
4404 break;
4406 if (tem == 0)
4407 last_named = 1;
4409 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4410 most machines, if this is a varargs/stdarg function, then we treat
4411 the last named arg as if it were anonymous too. */
4412 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4414 if (TREE_TYPE (parm) == error_mark_node
4415 /* This can happen after weird syntax errors
4416 or if an enum type is defined among the parms. */
4417 || TREE_CODE (parm) != PARM_DECL
4418 || passed_type == NULL)
4420 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4421 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4422 TREE_USED (parm) = 1;
4423 continue;
4426 /* For a varargs.h function, save info about regs and stack space
4427 used by the individual args, not including the va_alist arg. */
4428 if (hide_last_arg && last_named)
4429 current_function_args_info = args_so_far;
4431 /* Find mode of arg as it is passed, and mode of arg
4432 as it should be during execution of this function. */
4433 passed_mode = TYPE_MODE (passed_type);
4434 nominal_mode = TYPE_MODE (nominal_type);
4436 /* If the parm's mode is VOID, its value doesn't matter,
4437 so avoid the usual things like emit_move_insn that could crash.
4438 if (nominal_mode == VOIDmode)
4440 SET_DECL_RTL (parm, const0_rtx);
4441 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4442 continue;
4445 /* If the parm is to be passed as a transparent union, use the
4446 type of the first field for the tests below. We have already
4447 verified that the modes are the same. */
4448 if (DECL_TRANSPARENT_UNION (parm)
4449 || (TREE_CODE (passed_type) == UNION_TYPE
4450 && TYPE_TRANSPARENT_UNION (passed_type)))
4451 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4453 /* See if this arg was passed by invisible reference. It is if
4454 it is an object whose size depends on the contents of the
4455 object itself or if the machine requires these objects be passed
4456 that way. */
4458 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4459 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4460 || TREE_ADDRESSABLE (passed_type)
4461 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4462 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4463 passed_type, named_arg)
4464 #endif
4465 )
4466 {
4467 passed_type = nominal_type = build_pointer_type (passed_type);
4468 passed_pointer = 1;
4469 passed_mode = nominal_mode = Pmode;
4472 promoted_mode = passed_mode;
4474 #ifdef PROMOTE_FUNCTION_ARGS
4475 /* Compute the mode to which the arg is actually extended. */
4476 unsignedp = TREE_UNSIGNED (passed_type);
4477 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4478 #endif
4480 /* Let machine desc say which reg (if any) the parm arrives in.
4481 0 means it arrives on the stack. */
4482 #ifdef FUNCTION_INCOMING_ARG
4483 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4484 passed_type, named_arg);
4485 #else
4486 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4487 passed_type, named_arg);
4488 #endif
4490 if (entry_parm == 0)
4491 promoted_mode = passed_mode;
4493 #ifdef SETUP_INCOMING_VARARGS
4494 /* If this is the last named parameter, do any required setup for
4495 varargs or stdargs. We need to know about the case of this being an
4496 addressable type, in which case we skip the registers it
4497 would have arrived in.
4499 For stdargs, LAST_NAMED will be set for two parameters, the one that
4500 is actually the last named, and the dummy parameter. We only
4501 want to do this action once.
4503 Also, indicate when RTL generation is to be suppressed. */
4504 if (last_named && !varargs_setup)
4506 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4507 current_function_pretend_args_size, 0);
4508 varargs_setup = 1;
4510 #endif
4512 /* Determine parm's home in the stack,
4513 in case it arrives in the stack or we should pretend it did.
4515 Compute the stack position and rtx where the argument arrives
4516 and its size.
4518 There is one complexity here: If this was a parameter that would
4519 have been passed in registers, but wasn't only because it is
4520 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4521 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4522 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4523 0 as it was the previous time. */
4525 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4526 locate_and_pad_parm (promoted_mode, passed_type,
4527 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4528 1,
4529 #else
4530 #ifdef FUNCTION_INCOMING_ARG
4531 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4532 passed_type,
4533 pretend_named) != 0,
4534 #else
4535 FUNCTION_ARG (args_so_far, promoted_mode,
4536 passed_type,
4537 pretend_named) != 0,
4538 #endif
4539 #endif
4540 fndecl, &stack_args_size, &stack_offset, &arg_size,
4541 &alignment_pad);
4544 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4546 if (offset_rtx == const0_rtx)
4547 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4548 else
4549 stack_parm = gen_rtx_MEM (promoted_mode,
4550 gen_rtx_PLUS (Pmode,
4551 internal_arg_pointer,
4552 offset_rtx));
4554 set_mem_attributes (stack_parm, parm, 1);
4557 /* If this parameter was passed both in registers and in the stack,
4558 use the copy on the stack. */
4559 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4560 entry_parm = 0;
4562 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4563 /* If this parm was passed part in regs and part in memory,
4564 pretend it arrived entirely in memory
4565 by pushing the register-part onto the stack.
4567 In the special case of a DImode or DFmode that is split,
4568 we could put it together in a pseudoreg directly,
4569 but for now that's not worth bothering with. */
4571 if (entry_parm)
4573 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4574 passed_type, named_arg);
4576 if (nregs > 0)
4578 current_function_pretend_args_size
4579 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4580 / (PARM_BOUNDARY / BITS_PER_UNIT)
4581 * (PARM_BOUNDARY / BITS_PER_UNIT));
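/* Worked example of the rounding above (hypothetical target values):
   with nregs == 3, UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, the
   register part occupies 12 bytes, which is rounded up to the next
   8-byte parm boundary, so current_function_pretend_args_size
   becomes 16. */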
4583 /* Handle calls that pass values in multiple non-contiguous
4584 locations. The Irix 6 ABI has examples of this. */
4585 if (GET_CODE (entry_parm) == PARALLEL)
4586 emit_group_store (validize_mem (stack_parm), entry_parm,
4587 int_size_in_bytes (TREE_TYPE (parm)));
4589 else
4590 move_block_from_reg (REGNO (entry_parm),
4591 validize_mem (stack_parm), nregs,
4592 int_size_in_bytes (TREE_TYPE (parm)));
4594 entry_parm = stack_parm;
4597 #endif
4599 /* If we didn't decide this parm came in a register,
4600 by default it came on the stack. */
4601 if (entry_parm == 0)
4602 entry_parm = stack_parm;
4604 /* Record permanently how this parm was passed. */
4605 DECL_INCOMING_RTL (parm) = entry_parm;
4607 /* If there is actually space on the stack for this parm,
4608 count it in stack_args_size; otherwise set stack_parm to 0
4609 to indicate there is no preallocated stack slot for the parm. */
4611 if (entry_parm == stack_parm
4612 || (GET_CODE (entry_parm) == PARALLEL
4613 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4614 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4615 /* On some machines, even if a parm value arrives in a register
4616 there is still an (uninitialized) stack slot allocated for it.
4618 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4619 whether this parameter already has a stack slot allocated,
4620 because an arg block exists only if current_function_args_size
4621 is larger than some threshold, and we haven't calculated that
4622 yet. So, for now, we just assume that stack slots never exist
4623 in this case. */
4624 || REG_PARM_STACK_SPACE (fndecl) > 0
4625 #endif
4626 )
4627 {
4628 stack_args_size.constant += arg_size.constant;
4629 if (arg_size.var)
4630 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4632 else
4633 /* No stack slot was pushed for this parm. */
4634 stack_parm = 0;
4636 /* Update info on where next arg arrives in registers. */
4638 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4639 passed_type, named_arg);
4641 /* If we can't trust the parm stack slot to be aligned enough
4642 for its ultimate type, don't use that slot after entry.
4643 We'll make another stack slot, if we need one. */
4645 unsigned int thisparm_boundary
4646 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4648 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4649 stack_parm = 0;
4652 /* If parm was passed in memory, and we need to convert it on entry,
4653 don't store it back in that same slot. */
4654 if (entry_parm != 0
4655 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4656 stack_parm = 0;
4658 /* When an argument is passed in multiple locations, we can't
4659 make use of this information, but we can save some copying if
4660 the whole argument is passed in a single register. */
4661 if (GET_CODE (entry_parm) == PARALLEL
4662 && nominal_mode != BLKmode && passed_mode != BLKmode)
4664 int i, len = XVECLEN (entry_parm, 0);
4666 for (i = 0; i < len; i++)
4667 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4668 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4669 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4670 == passed_mode)
4671 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4673 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4674 DECL_INCOMING_RTL (parm) = entry_parm;
4675 break;
4679 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4680 in the mode in which it arrives.
4681 STACK_PARM is an RTX for a stack slot where the parameter can live
4682 during the function (in case we want to put it there).
4683 STACK_PARM is 0 if no stack slot was pushed for it.
4685 Now output code if necessary to convert ENTRY_PARM to
4686 the type in which this function declares it,
4687 and store that result in an appropriate place,
4688 which may be a pseudo reg, may be STACK_PARM,
4689 or may be a local stack slot if STACK_PARM is 0.
4691 Set DECL_RTL to that place. */
4693 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4695 /* If a BLKmode arrives in registers, copy it to a stack slot.
4696 Handle calls that pass values in multiple non-contiguous
4697 locations. The Irix 6 ABI has examples of this. */
4698 if (GET_CODE (entry_parm) == REG
4699 || GET_CODE (entry_parm) == PARALLEL)
4701 int size_stored
4702 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4703 UNITS_PER_WORD);
4705 /* Note that we will be storing an integral number of words.
4706 So we have to be careful to ensure that we allocate an
4707 integral number of words. We do this below in the
4708 assign_stack_local if space was not allocated in the argument
4709 list. If it was, this will not work if PARM_BOUNDARY is not
4710 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4711 if it becomes a problem. */
4713 if (stack_parm == 0)
4715 stack_parm
4716 = assign_stack_local (GET_MODE (entry_parm),
4717 size_stored, 0);
4718 set_mem_attributes (stack_parm, parm, 1);
4721 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4722 abort ();
4724 /* Handle calls that pass values in multiple non-contiguous
4725 locations. The Irix 6 ABI has examples of this. */
4726 if (GET_CODE (entry_parm) == PARALLEL)
4727 emit_group_store (validize_mem (stack_parm), entry_parm,
4728 int_size_in_bytes (TREE_TYPE (parm)));
4729 else
4730 move_block_from_reg (REGNO (entry_parm),
4731 validize_mem (stack_parm),
4732 size_stored / UNITS_PER_WORD,
4733 int_size_in_bytes (TREE_TYPE (parm)));
4735 SET_DECL_RTL (parm, stack_parm);
4737 else if (! ((! optimize
4738 && ! DECL_REGISTER (parm))
4739 || TREE_SIDE_EFFECTS (parm)
4740 /* If -ffloat-store specified, don't put explicit
4741 float variables into registers. */
4742 || (flag_float_store
4743 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4744 /* Always assign pseudo to structure return or item passed
4745 by invisible reference. */
4746 || passed_pointer || parm == function_result_decl)
4748 /* Store the parm in a pseudoregister during the function, but we
4749 may need to do it in a wider mode. */
4751 rtx parmreg;
4752 unsigned int regno, regnoi = 0, regnor = 0;
4754 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4756 promoted_nominal_mode
4757 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4759 parmreg = gen_reg_rtx (promoted_nominal_mode);
4760 mark_user_reg (parmreg);
4762 /* If this was an item that we received a pointer to, set DECL_RTL
4763 appropriately. */
4764 if (passed_pointer)
4766 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4767 parmreg);
4768 set_mem_attributes (x, parm, 1);
4769 SET_DECL_RTL (parm, x);
4771 else
4773 SET_DECL_RTL (parm, parmreg);
4774 maybe_set_unchanging (DECL_RTL (parm), parm);
4777 /* Copy the value into the register. */
4778 if (nominal_mode != passed_mode
4779 || promoted_nominal_mode != promoted_mode)
4781 int save_tree_used;
4782 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4783 mode, by the caller. We now have to convert it to
4784 NOMINAL_MODE, if different. However, PARMREG may be in
4785 a different mode than NOMINAL_MODE if it is being stored
4786 promoted.
4788 If ENTRY_PARM is a hard register, it might be in a register
4789 not valid for operating in its mode (e.g., an odd-numbered
4790 register for a DFmode). In that case, moves are the only
4791 thing valid, so we can't do a convert from there. This
4792 occurs when the calling sequence allows such misaligned
4793 usages.
4795 In addition, the conversion may involve a call, which could
4796 clobber parameters which haven't been copied to pseudo
4797 registers yet. Therefore, we must first copy the parm to
4798 a pseudo reg here, and save the conversion until after all
4799 parameters have been moved. */
4801 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4803 emit_move_insn (tempreg, validize_mem (entry_parm));
4805 push_to_sequence (conversion_insns);
4806 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4808 if (GET_CODE (tempreg) == SUBREG
4809 && GET_MODE (tempreg) == nominal_mode
4810 && GET_CODE (SUBREG_REG (tempreg)) == REG
4811 && nominal_mode == passed_mode
4812 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4813 && GET_MODE_SIZE (GET_MODE (tempreg))
4814 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4816 /* The argument is already sign/zero extended, so note it
4817 into the subreg. */
4818 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4819 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4822 /* TREE_USED gets set erroneously during expand_assignment. */
4823 save_tree_used = TREE_USED (parm);
4824 expand_assignment (parm,
4825 make_tree (nominal_type, tempreg), 0, 0);
4826 TREE_USED (parm) = save_tree_used;
4827 conversion_insns = get_insns ();
4828 did_conversion = 1;
4829 end_sequence ();
4831 else
4832 emit_move_insn (parmreg, validize_mem (entry_parm));
4834 /* If we were passed a pointer but the actual value
4835 can safely live in a register, put it in one. */
4836 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4837 /* If by-reference argument was promoted, demote it. */
4838 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4839 || ! ((! optimize
4840 && ! DECL_REGISTER (parm))
4841 || TREE_SIDE_EFFECTS (parm)
4842 /* If -ffloat-store specified, don't put explicit
4843 float variables into registers. */
4844 || (flag_float_store
4845 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4847 /* We can't use nominal_mode, because it will have been set to
4848 Pmode above. We must use the actual mode of the parm. */
4849 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4850 mark_user_reg (parmreg);
4851 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4853 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4854 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4855 push_to_sequence (conversion_insns);
4856 emit_move_insn (tempreg, DECL_RTL (parm));
4857 SET_DECL_RTL (parm,
4858 convert_to_mode (GET_MODE (parmreg),
4859 tempreg,
4860 unsigned_p));
4861 emit_move_insn (parmreg, DECL_RTL (parm));
4862 conversion_insns = get_insns();
4863 did_conversion = 1;
4864 end_sequence ();
4866 else
4867 emit_move_insn (parmreg, DECL_RTL (parm));
4868 SET_DECL_RTL (parm, parmreg);
4869 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4870 now the parm. */
4871 stack_parm = 0;
4873 #ifdef FUNCTION_ARG_CALLEE_COPIES
4874 /* If we are passed an arg by reference and it is our responsibility
4875 to make a copy, do it now.
4876 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4877 original argument, so we must recreate them in the call to
4878 FUNCTION_ARG_CALLEE_COPIES. */
4879 /* ??? Later add code to handle the case that if the argument isn't
4880 modified, don't do the copy. */
4882 else if (passed_pointer
4883 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4884 TYPE_MODE (DECL_ARG_TYPE (parm)),
4885 DECL_ARG_TYPE (parm),
4886 named_arg)
4887 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4889 rtx copy;
4890 tree type = DECL_ARG_TYPE (parm);
4892 /* This sequence may involve a library call perhaps clobbering
4893 registers that haven't been copied to pseudos yet. */
4895 push_to_sequence (conversion_insns);
4897 if (!COMPLETE_TYPE_P (type)
4898 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4899 /* This is a variable sized object. */
4900 copy = gen_rtx_MEM (BLKmode,
4901 allocate_dynamic_stack_space
4902 (expr_size (parm), NULL_RTX,
4903 TYPE_ALIGN (type)));
4904 else
4905 copy = assign_stack_temp (TYPE_MODE (type),
4906 int_size_in_bytes (type), 1);
4907 set_mem_attributes (copy, parm, 1);
4909 store_expr (parm, copy, 0);
4910 emit_move_insn (parmreg, XEXP (copy, 0));
4911 conversion_insns = get_insns ();
4912 did_conversion = 1;
4913 end_sequence ();
4915 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4917 /* In any case, record the parm's desired stack location
4918 in case we later discover it must live in the stack.
4920 If it is a COMPLEX value, store the stack location for both
4921 halves. */
4923 if (GET_CODE (parmreg) == CONCAT)
4924 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4925 else
4926 regno = REGNO (parmreg);
4928 if (regno >= max_parm_reg)
4930 rtx *new;
4931 int old_max_parm_reg = max_parm_reg;
4933 /* It's slow to expand this one register at a time,
4934 but it's also rare and we need max_parm_reg to be
4935 precisely correct. */
4936 max_parm_reg = regno + 1;
4937 new = (rtx *) xrealloc (parm_reg_stack_loc,
4938 max_parm_reg * sizeof (rtx));
4939 memset ((char *) (new + old_max_parm_reg), 0,
4940 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4941 parm_reg_stack_loc = new;
4944 if (GET_CODE (parmreg) == CONCAT)
4946 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4948 regnor = REGNO (gen_realpart (submode, parmreg));
4949 regnoi = REGNO (gen_imagpart (submode, parmreg));
4951 if (stack_parm != 0)
4953 parm_reg_stack_loc[regnor]
4954 = gen_realpart (submode, stack_parm);
4955 parm_reg_stack_loc[regnoi]
4956 = gen_imagpart (submode, stack_parm);
4958 else
4960 parm_reg_stack_loc[regnor] = 0;
4961 parm_reg_stack_loc[regnoi] = 0;
4964 else
4965 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4967 /* Mark the register as eliminable if we did no conversion
4968 and it was copied from memory at a fixed offset,
4969 and the arg pointer was not copied to a pseudo-reg.
4970 If the arg pointer is a pseudo reg or the offset formed
4971 an invalid address, such memory-equivalences
4972 as we make here would screw up life analysis for it. */
4973 if (nominal_mode == passed_mode
4974 && ! did_conversion
4975 && stack_parm != 0
4976 && GET_CODE (stack_parm) == MEM
4977 && stack_offset.var == 0
4978 && reg_mentioned_p (virtual_incoming_args_rtx,
4979 XEXP (stack_parm, 0)))
4981 rtx linsn = get_last_insn ();
4982 rtx sinsn, set;
4984 /* Mark complex types separately. */
4985 if (GET_CODE (parmreg) == CONCAT)
4986 /* Scan backwards for the set of the real and
4987 imaginary parts. */
4988 for (sinsn = linsn; sinsn != 0;
4989 sinsn = prev_nonnote_insn (sinsn))
4991 set = single_set (sinsn);
4992 if (set != 0
4993 && SET_DEST (set) == regno_reg_rtx [regnoi])
4994 REG_NOTES (sinsn)
4995 = gen_rtx_EXPR_LIST (REG_EQUIV,
4996 parm_reg_stack_loc[regnoi],
4997 REG_NOTES (sinsn));
4998 else if (set != 0
4999 && SET_DEST (set) == regno_reg_rtx [regnor])
5000 REG_NOTES (sinsn)
5001 = gen_rtx_EXPR_LIST (REG_EQUIV,
5002 parm_reg_stack_loc[regnor],
5003 REG_NOTES (sinsn));
5005 else if ((set = single_set (linsn)) != 0
5006 && SET_DEST (set) == parmreg)
5007 REG_NOTES (linsn)
5008 = gen_rtx_EXPR_LIST (REG_EQUIV,
5009 stack_parm, REG_NOTES (linsn));
5012 /* For pointer data type, suggest pointer register. */
5013 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5014 mark_reg_pointer (parmreg,
5015 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5017 /* If something wants our address, try to use ADDRESSOF. */
5018 if (TREE_ADDRESSABLE (parm))
5020 /* If we end up putting something into the stack,
5021 fixup_var_refs_insns will need to make a pass over
5022 all the instructions. It looks through the pending
5023 sequences -- but it can't see the ones in the
5024 CONVERSION_INSNS, if they're not on the sequence
5025 stack. So, we go back to that sequence, just so that
5026 the fixups will happen. */
5027 push_to_sequence (conversion_insns);
5028 put_var_into_stack (parm);
5029 conversion_insns = get_insns ();
5030 end_sequence ();
5033 else
5035 /* Value must be stored in the stack slot STACK_PARM
5036 during function execution. */
5038 if (promoted_mode != nominal_mode)
5040 /* Conversion is required. */
5041 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5043 emit_move_insn (tempreg, validize_mem (entry_parm));
5045 push_to_sequence (conversion_insns);
5046 entry_parm = convert_to_mode (nominal_mode, tempreg,
5047 TREE_UNSIGNED (TREE_TYPE (parm)));
5048 if (stack_parm)
5049 /* ??? This may need a big-endian conversion on sparc64. */
5050 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5052 conversion_insns = get_insns ();
5053 did_conversion = 1;
5054 end_sequence ();
5057 if (entry_parm != stack_parm)
5059 if (stack_parm == 0)
5061 stack_parm
5062 = assign_stack_local (GET_MODE (entry_parm),
5063 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5064 set_mem_attributes (stack_parm, parm, 1);
5067 if (promoted_mode != nominal_mode)
5069 push_to_sequence (conversion_insns);
5070 emit_move_insn (validize_mem (stack_parm),
5071 validize_mem (entry_parm));
5072 conversion_insns = get_insns ();
5073 end_sequence ();
5075 else
5076 emit_move_insn (validize_mem (stack_parm),
5077 validize_mem (entry_parm));
5080 SET_DECL_RTL (parm, stack_parm);
5083 /* If this "parameter" was the place where we are receiving the
5084 function's incoming structure pointer, set up the result. */
5085 if (parm == function_result_decl)
5087 tree result = DECL_RESULT (fndecl);
5088 rtx addr = DECL_RTL (parm);
5089 rtx x;
5091 #ifdef POINTERS_EXTEND_UNSIGNED
5092 if (GET_MODE (addr) != Pmode)
5093 addr = convert_memory_address (Pmode, addr);
5094 #endif
5096 x = gen_rtx_MEM (DECL_MODE (result), addr);
5097 set_mem_attributes (x, result, 1);
5098 SET_DECL_RTL (result, x);
5101 if (GET_CODE (DECL_RTL (parm)) == REG)
5102 REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5103 else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5105 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5106 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5111 /* Output all parameter conversion instructions (possibly including calls)
5112 now that all parameters have been copied out of hard registers. */
5113 emit_insns (conversion_insns);
5115 last_parm_insn = get_last_insn ();
5117 current_function_args_size = stack_args_size.constant;
5119 /* Adjust function incoming argument size for alignment and
5120 minimum length. */
5122 #ifdef REG_PARM_STACK_SPACE
5123 #ifndef MAYBE_REG_PARM_STACK_SPACE
5124 current_function_args_size = MAX (current_function_args_size,
5125 REG_PARM_STACK_SPACE (fndecl));
5126 #endif
5127 #endif
5129 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5131 current_function_args_size
5132 = ((current_function_args_size + STACK_BYTES - 1)
5133 / STACK_BYTES) * STACK_BYTES;
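/* Worked example (hypothetical values): with STACK_BOUNDARY == 64,
   STACK_BYTES is 8, so an args size of 13 bytes is rounded up to
   (13 + 8 - 1) / 8 * 8 == 16. */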
5135 #ifdef ARGS_GROW_DOWNWARD
5136 current_function_arg_offset_rtx
5137 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5138 : expand_expr (size_diffop (stack_args_size.var,
5139 size_int (-stack_args_size.constant)),
5140 NULL_RTX, VOIDmode, 0));
5141 #else
5142 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5143 #endif
5145 /* See how many bytes, if any, of its args a function should try to pop
5146 on return. */
5148 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5149 current_function_args_size);
5151 /* For a stdarg.h function, save info about
5152 regs and stack space used by the named args. */
5154 if (!hide_last_arg)
5155 current_function_args_info = args_so_far;
5157 /* Set the rtx used for the function return value. Put this in its
5158 own variable so any optimizers that need this information don't have
5159 to include tree.h. Do this here so it gets done when an inlined
5160 function gets output. */
5162 current_function_return_rtx
5163 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5164 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5166 /* If scalar return value was computed in a pseudo-reg, or was a named
5167 return value that got dumped to the stack, copy that to the hard
5168 return register. */
5169 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5171 tree decl_result = DECL_RESULT (fndecl);
5172 rtx decl_rtl = DECL_RTL (decl_result);
5174 if (REG_P (decl_rtl)
5175 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5176 : DECL_REGISTER (decl_result))
5178 rtx real_decl_rtl;
5180 #ifdef FUNCTION_OUTGOING_VALUE
5181 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5182 fndecl);
5183 #else
5184 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5185 fndecl);
5186 #endif
5187 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5188 /* The delay slot scheduler assumes that current_function_return_rtx
5189 holds the hard register containing the return value, not a
5190 temporary pseudo. */
5191 current_function_return_rtx = real_decl_rtl;
5196 /* Indicate whether REGNO is an incoming argument to the current function
5197 that was promoted to a wider mode. If so, return the RTX for the
5198 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5199 that REGNO is promoted from and whether the promotion was signed or
5200 unsigned. */
5202 #ifdef PROMOTE_FUNCTION_ARGS
5204 rtx
5205 promoted_input_arg (regno, pmode, punsignedp)
5206 unsigned int regno;
5207 enum machine_mode *pmode;
5208 int *punsignedp;
5210 tree arg;
5212 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5213 arg = TREE_CHAIN (arg))
5214 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5215 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5216 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5218 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5219 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5221 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5222 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5223 && mode != DECL_MODE (arg))
5225 *pmode = DECL_MODE (arg);
5226 *punsignedp = unsignedp;
5227 return DECL_INCOMING_RTL (arg);
5231 return 0;
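/* For illustration (assuming PROMOTE_FUNCTION_ARGS widens sub-word
   scalars): a `short' argument arriving in an SImode hard register makes
   this function return that register's RTX, with *PMODE set to HImode
   and *PUNSIGNEDP reflecting the signedness of `short'. */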
5234 #endif
5236 /* Compute the size and offset from the start of the stacked arguments for a
5237 parm passed in mode PASSED_MODE and with type TYPE.
5239 INITIAL_OFFSET_PTR points to the current offset into the stacked
5240 arguments.
5242 The starting offset and size for this parm are returned in *OFFSET_PTR
5243 and *ARG_SIZE_PTR, respectively.
5245 IN_REGS is non-zero if the argument will be passed in registers. It will
5246 never be set if REG_PARM_STACK_SPACE is not defined.
5248 FNDECL is the function in which the argument was defined.
5250 There are two types of rounding that are done. The first, controlled by
5251 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5252 list to be aligned to the specific boundary (in bits). This rounding
5253 affects the initial and starting offsets, but not the argument size.
5255 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5256 optionally rounds the size of the parm to PARM_BOUNDARY. The
5257 initial offset is not affected by this rounding, while the size always
5258 is and the starting offset may be. */
5260 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5261 initial_offset_ptr is positive because locate_and_pad_parm's
5262 callers pass in the total size of args so far as
5263 initial_offset_ptr. arg_size_ptr is always positive. */
5265 void
5266 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5267 initial_offset_ptr, offset_ptr, arg_size_ptr,
5268 alignment_pad)
5269 enum machine_mode passed_mode;
5270 tree type;
5271 int in_regs ATTRIBUTE_UNUSED;
5272 tree fndecl ATTRIBUTE_UNUSED;
5273 struct args_size *initial_offset_ptr;
5274 struct args_size *offset_ptr;
5275 struct args_size *arg_size_ptr;
5276 struct args_size *alignment_pad;
5279 tree sizetree
5280 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5281 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5282 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5284 #ifdef REG_PARM_STACK_SPACE
5285 /* If we have found a stack parm before we reach the end of the
5286 area reserved for registers, skip that area. */
5287 if (! in_regs)
5289 int reg_parm_stack_space = 0;
5291 #ifdef MAYBE_REG_PARM_STACK_SPACE
5292 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5293 #else
5294 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5295 #endif
5296 if (reg_parm_stack_space > 0)
5298 if (initial_offset_ptr->var)
5300 initial_offset_ptr->var
5301 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5302 ssize_int (reg_parm_stack_space));
5303 initial_offset_ptr->constant = 0;
5305 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5306 initial_offset_ptr->constant = reg_parm_stack_space;
5309 #endif /* REG_PARM_STACK_SPACE */
5311 arg_size_ptr->var = 0;
5312 arg_size_ptr->constant = 0;
5313 alignment_pad->var = 0;
5314 alignment_pad->constant = 0;
5316 #ifdef ARGS_GROW_DOWNWARD
5317 if (initial_offset_ptr->var)
5319 offset_ptr->constant = 0;
5320 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5321 initial_offset_ptr->var);
5323 else
5325 offset_ptr->constant = -initial_offset_ptr->constant;
5326 offset_ptr->var = 0;
5328 if (where_pad != none
5329 && (!host_integerp (sizetree, 1)
5330 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5331 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5332 SUB_PARM_SIZE (*offset_ptr, sizetree);
5333 if (where_pad != downward)
5334 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5335 if (initial_offset_ptr->var)
5336 arg_size_ptr->var = size_binop (MINUS_EXPR,
5337 size_binop (MINUS_EXPR,
5338 ssize_int (0),
5339 initial_offset_ptr->var),
5340 offset_ptr->var);
5342 else
5343 arg_size_ptr->constant = (-initial_offset_ptr->constant
5344 - offset_ptr->constant);
5346 #else /* !ARGS_GROW_DOWNWARD */
5347 if (!in_regs
5348 #ifdef REG_PARM_STACK_SPACE
5349 || REG_PARM_STACK_SPACE (fndecl) > 0
5350 #endif
5351 )
5352 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5353 *offset_ptr = *initial_offset_ptr;
5355 #ifdef PUSH_ROUNDING
5356 if (passed_mode != BLKmode)
5357 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5358 #endif
5360 /* pad_below needs the pre-rounded size to know how much to pad below,
5361 so this must be done before rounding up. */
5362 if (where_pad == downward
5363 /* However, BLKmode args passed in regs have their padding done elsewhere.
5364 The stack slot must be able to hold the entire register. */
5365 && !(in_regs && passed_mode == BLKmode))
5366 pad_below (offset_ptr, passed_mode, sizetree);
5368 if (where_pad != none
5369 && (!host_integerp (sizetree, 1)
5370 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5371 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5373 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5374 #endif /* ARGS_GROW_DOWNWARD */
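/* Worked example for the upward-growing case (hypothetical values):
   for an SImode scalar with *INITIAL_OFFSET_PTR == 4 and both
   FUNCTION_ARG_BOUNDARY and PARM_BOUNDARY equal to 32 bits, the 4-byte
   offset is already aligned, so *OFFSET_PTR becomes 4 and *ARG_SIZE_PTR
   becomes 4, leaving the next parm to start at offset 8. */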
5377 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5378 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5380 static void
5381 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5382 struct args_size *offset_ptr;
5383 int boundary;
5384 struct args_size *alignment_pad;
5386 tree save_var = NULL_TREE;
5387 HOST_WIDE_INT save_constant = 0;
5389 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5391 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5393 save_var = offset_ptr->var;
5394 save_constant = offset_ptr->constant;
5397 alignment_pad->var = NULL_TREE;
5398 alignment_pad->constant = 0;
5400 if (boundary > BITS_PER_UNIT)
5402 if (offset_ptr->var)
5404 offset_ptr->var =
5405 #ifdef ARGS_GROW_DOWNWARD
5406 round_down
5407 #else
5408 round_up
5409 #endif
5410 (ARGS_SIZE_TREE (*offset_ptr),
5411 boundary / BITS_PER_UNIT);
5412 offset_ptr->constant = 0; /*?*/
5413 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5414 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5415 save_var);
5417 else
5419 offset_ptr->constant =
5420 #ifdef ARGS_GROW_DOWNWARD
5421 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5422 #else
5423 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5424 #endif
5425 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5426 alignment_pad->constant = offset_ptr->constant - save_constant;
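/* Worked example (hypothetical values): with a 64-bit BOUNDARY,
   boundary_in_bytes is 8, so a constant offset of 10 becomes
   CEIL_ROUND (10, 8) == 16 when arguments grow upward, or
   FLOOR_ROUND (10, 8) == 8 when they grow downward. */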
5431 #ifndef ARGS_GROW_DOWNWARD
5432 static void
5433 pad_below (offset_ptr, passed_mode, sizetree)
5434 struct args_size *offset_ptr;
5435 enum machine_mode passed_mode;
5436 tree sizetree;
5438 if (passed_mode != BLKmode)
5440 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5441 offset_ptr->constant
5442 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5443 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5444 - GET_MODE_SIZE (passed_mode));
5446 else
5448 if (TREE_CODE (sizetree) != INTEGER_CST
5449 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5451 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5452 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5453 /* Add it in. */
5454 ADD_PARM_SIZE (*offset_ptr, s2);
5455 SUB_PARM_SIZE (*offset_ptr, sizetree);
5459 #endif
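/* Worked example (hypothetical values): an HImode parm of 2 bytes with
   PARM_BOUNDARY == 32 occupies a 4-byte slot; since 16 % 32 != 0, the
   code above adds 4 - 2 == 2 bytes of padding below the value, so the
   data sits at the high end of its slot. */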
5461 /* Walk the tree of blocks describing the binding levels within a function
5462 and warn about uninitialized variables.
5463 This is done after calling flow_analysis and before global_alloc
5464 clobbers the pseudo-regs to hard regs. */
5466 void
5467 uninitialized_vars_warning (block)
5468 tree block;
5470 tree decl, sub;
5471 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5473 if (warn_uninitialized
5474 && TREE_CODE (decl) == VAR_DECL
5475 /* These warnings are unreliable for aggregates
5476 because assigning the fields one by one can fail to convince
5477 flow.c that the entire aggregate was initialized.
5478 Unions are troublesome because members may be shorter. */
5479 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5480 && DECL_RTL (decl) != 0
5481 && GET_CODE (DECL_RTL (decl)) == REG
5482 /* Global optimizations can make it difficult to determine if a
5483 particular variable has been initialized. However, a VAR_DECL
5484 with a nonzero DECL_INITIAL had an initializer, so do not
5485 claim it is potentially uninitialized.
5487 We do not care about the actual value in DECL_INITIAL, so we do
5488 not worry that it may be a dangling pointer. */
5489 && DECL_INITIAL (decl) == NULL_TREE
5490 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5491 warning_with_decl (decl,
5492 "`%s' might be used uninitialized in this function");
5493 if (extra_warnings
5494 && TREE_CODE (decl) == VAR_DECL
5495 && DECL_RTL (decl) != 0
5496 && GET_CODE (DECL_RTL (decl)) == REG
5497 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5498 warning_with_decl (decl,
5499 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5501 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5502 uninitialized_vars_warning (sub);
5505 /* Do the appropriate part of uninitialized_vars_warning
5506 but for arguments instead of local variables. */
5508 void
5509 setjmp_args_warning ()
5511 tree decl;
5512 for (decl = DECL_ARGUMENTS (current_function_decl);
5513 decl; decl = TREE_CHAIN (decl))
5514 if (DECL_RTL (decl) != 0
5515 && GET_CODE (DECL_RTL (decl)) == REG
5516 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5517 warning_with_decl (decl,
5518 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5521 /* If this function calls setjmp, put all vars into the stack
5522 unless they were declared `register'. */
5524 void
5525 setjmp_protect (block)
5526 tree block;
5528 tree decl, sub;
5529 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5530 if ((TREE_CODE (decl) == VAR_DECL
5531 || TREE_CODE (decl) == PARM_DECL)
5532 && DECL_RTL (decl) != 0
5533 && (GET_CODE (DECL_RTL (decl)) == REG
5534 || (GET_CODE (DECL_RTL (decl)) == MEM
5535 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5536 /* If this variable came from an inline function, it must be
5537 that its life doesn't overlap the setjmp. If there was a
5538 setjmp in the function, it would already be in memory. We
5539 must exclude such variables because their DECL_RTL might be
5540 set to strange things such as virtual_stack_vars_rtx. */
5541 && ! DECL_FROM_INLINE (decl)
5542 && (
5543 #ifdef NON_SAVING_SETJMP
5544 /* If longjmp doesn't restore the registers,
5545 don't put anything in them. */
5546 NON_SAVING_SETJMP
5547 ||
5548 #endif
5549 ! DECL_REGISTER (decl)))
5550 put_var_into_stack (decl);
5551 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5552 setjmp_protect (sub);
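/* [Editorial sketch, not part of function.c]  The user-visible hazard
   that setjmp_protect guards against, kept under #if 0.  If I stayed in
   a register, the store between setjmp and longjmp could be undone when
   longjmp restores registers; `volatile' here plays the role that
   put_var_into_stack plays inside the compiler.  */
#if 0
#include <setjmp.h>
#include <stdio.h>

static jmp_buf env;

int
main (void)
{
  volatile int i = 0;

  if (setjmp (env) == 0)
    {
      i = 1;			/* modified after setjmp ... */
      longjmp (env, 1);		/* ... and read after longjmp */
    }
  printf ("%d\n", i);		/* reliably 1 only because I is in memory */
  return 0;
}
#endif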
5555 /* Like the previous function, but for args instead of local variables. */
5557 void
5558 setjmp_protect_args ()
5560 tree decl;
5561 for (decl = DECL_ARGUMENTS (current_function_decl);
5562 decl; decl = TREE_CHAIN (decl))
5563 if ((TREE_CODE (decl) == VAR_DECL
5564 || TREE_CODE (decl) == PARM_DECL)
5565 && DECL_RTL (decl) != 0
5566 && (GET_CODE (DECL_RTL (decl)) == REG
5567 || (GET_CODE (DECL_RTL (decl)) == MEM
5568 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5569 && (
5570 /* If longjmp doesn't restore the registers,
5571 don't put anything in them. */
5572 #ifdef NON_SAVING_SETJMP
5573 NON_SAVING_SETJMP
5574 ||
5575 #endif
5576 ! DECL_REGISTER (decl)))
5577 put_var_into_stack (decl);
5580 /* Return the context-pointer register corresponding to DECL,
5581 or 0 if it does not need one. */
5583 rtx
5584 lookup_static_chain (decl)
5585 tree decl;
5587 tree context = decl_function_context (decl);
5588 tree link;
5590 if (context == 0
5591 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5592 return 0;
5594 /* We treat inline_function_decl as an alias for the current function
5595 because that is the inline function whose vars, types, etc.
5596 are being merged into the current function.
5597 See expand_inline_function. */
5598 if (context == current_function_decl || context == inline_function_decl)
5599 return virtual_stack_vars_rtx;
5601 for (link = context_display; link; link = TREE_CHAIN (link))
5602 if (TREE_PURPOSE (link) == context)
5603 return RTL_EXPR_RTL (TREE_VALUE (link));
5605 abort ();
5608 /* Convert a stack slot address ADDR for variable VAR
5609 (from a containing function)
5610 into an address valid in this function (using a static chain). */
5612 rtx
5613 fix_lexical_addr (addr, var)
5614 rtx addr;
5615 tree var;
5617 rtx basereg;
5618 HOST_WIDE_INT displacement;
5619 tree context = decl_function_context (var);
5620 struct function *fp;
5621 rtx base = 0;
5623 /* If this is the present function, we need not do anything. */
5624 if (context == current_function_decl || context == inline_function_decl)
5625 return addr;
5627 fp = find_function_data (context);
5629 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5630 addr = XEXP (XEXP (addr, 0), 0);
5632 /* Decode given address as base reg plus displacement. */
5633 if (GET_CODE (addr) == REG)
5634 basereg = addr, displacement = 0;
5635 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5636 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5637 else
5638 abort ();
5640 /* We accept vars reached via the containing function's
5641 incoming arg pointer and via its stack variables pointer. */
5642 if (basereg == fp->internal_arg_pointer)
5644 /* If reached via arg pointer, get the arg pointer value
5645 out of that function's stack frame.
5647 There are two cases: If a separate ap is needed, allocate a
5648 slot in the outer function for it and dereference it that way.
5649 This is correct even if the real ap is actually a pseudo.
5650 Otherwise, just adjust the offset from the frame pointer to
5651 compensate. */
5653 #ifdef NEED_SEPARATE_AP
5654 rtx addr;
5656 addr = get_arg_pointer_save_area (fp);
5657 addr = fix_lexical_addr (XEXP (addr, 0), var);
5658 addr = memory_address (Pmode, addr);
5660 base = gen_rtx_MEM (Pmode, addr);
5661 set_mem_alias_set (base, get_frame_alias_set ());
5662 base = copy_to_reg (base);
5663 #else
5664 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5665 base = lookup_static_chain (var);
5666 #endif
5669 else if (basereg == virtual_stack_vars_rtx)
5671 /* This is the same code as lookup_static_chain, duplicated here to
5672 avoid an extra call to decl_function_context. */
5673 tree link;
5675 for (link = context_display; link; link = TREE_CHAIN (link))
5676 if (TREE_PURPOSE (link) == context)
5678 base = RTL_EXPR_RTL (TREE_VALUE (link));
5679 break;
5683 if (base == 0)
5684 abort ();
5686 /* Use same offset, relative to appropriate static chain or argument
5687 pointer. */
5688 return plus_constant (base, displacement);
5691 /* Return the address of the trampoline for entering nested fn FUNCTION.
5692 If necessary, allocate a trampoline (in the stack frame)
5693 and emit rtl to initialize its contents (at entry to this function). */
5695 rtx
5696 trampoline_address (function)
5697 tree function;
5699 tree link;
5700 tree rtlexp;
5701 rtx tramp;
5702 struct function *fp;
5703 tree fn_context;
5705 /* Find an existing trampoline and return it. */
5706 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5707 if (TREE_PURPOSE (link) == function)
5708 return
5709 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5711 for (fp = outer_function_chain; fp; fp = fp->outer)
5712 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5713 if (TREE_PURPOSE (link) == function)
5715 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5716 function);
5717 return adjust_trampoline_addr (tramp);
5720 /* None exists; we must make one. */
5722 /* Find the `struct function' for the function containing FUNCTION. */
5723 fp = 0;
5724 fn_context = decl_function_context (function);
5725 if (fn_context != current_function_decl
5726 && fn_context != inline_function_decl)
5727 fp = find_function_data (fn_context);
5729 /* Allocate run-time space for this trampoline
5730 (usually in the defining function's stack frame). */
5731 #ifdef ALLOCATE_TRAMPOLINE
5732 tramp = ALLOCATE_TRAMPOLINE (fp);
5733 #else
5734 /* If rounding needed, allocate extra space
5735 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5736 #ifdef TRAMPOLINE_ALIGNMENT
5737 #define TRAMPOLINE_REAL_SIZE \
5738 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5739 #else
5740 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5741 #endif
5742 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5743 fp ? fp : cfun);
5744 #endif
5746 /* Record the trampoline for reuse and note it for later initialization
5747 by expand_function_end. */
5748 if (fp != 0)
5750 rtlexp = make_node (RTL_EXPR);
5751 RTL_EXPR_RTL (rtlexp) = tramp;
5752 fp->x_trampoline_list = tree_cons (function, rtlexp,
5753 fp->x_trampoline_list);
5755 else
5757 /* Make the RTL_EXPR node temporary, not momentary, so that the
5758 trampoline_list doesn't become garbage. */
5759 rtlexp = make_node (RTL_EXPR);
5761 RTL_EXPR_RTL (rtlexp) = tramp;
5762 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5765 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5766 return adjust_trampoline_addr (tramp);
5769 /* Given a trampoline address,
5770 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5772 static rtx
5773 round_trampoline_addr (tramp)
5774 rtx tramp;
5776 #ifdef TRAMPOLINE_ALIGNMENT
5777 /* Round address up to desired boundary. */
5778 rtx temp = gen_reg_rtx (Pmode);
5779 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5780 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5782 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5783 temp, 0, OPTAB_LIB_WIDEN);
5784 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5785 temp, 0, OPTAB_LIB_WIDEN);
5786 #endif
5787 return tramp;
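/* [Editorial sketch, not part of function.c]  The add-then-mask rounding
   that round_trampoline_addr expands above, rerun on a plain integer
   address and kept under #if 0.  Assumes the alignment is a power of two,
   so that -ALIGN is a mask with the low bits clear.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long addr = 0x1003;
  unsigned long align = 16;	/* TRAMPOLINE_ALIGNMENT in bytes (assumed) */

  unsigned long rounded = (addr + align - 1) & -align;
  printf ("0x%lx -> 0x%lx\n", addr, rounded);	/* 0x1003 -> 0x1010 */
  return 0;
}
#endif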
5790 /* Given a trampoline address, round it then apply any
5791 platform-specific adjustments so that the result can be used for a
5792 function call. */
5794 static rtx
5795 adjust_trampoline_addr (tramp)
5796 rtx tramp;
5798 tramp = round_trampoline_addr (tramp);
5799 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5800 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5801 #endif
5802 return tramp;
5805 /* Put all this function's BLOCK nodes including those that are chained
5806 onto the first block into a vector, in depth-first order.
5807 Also store in each NOTE for the beginning or end of a block
5808 the corresponding BLOCK via NOTE_BLOCK.
5809 The block tree comes from DECL_INITIAL of the current function,
5810 and the insns from the current function's insn chain. */
5812 void
5813 identify_blocks ()
5815 int n_blocks;
5816 tree *block_vector, *last_block_vector;
5817 tree *block_stack;
5818 tree block = DECL_INITIAL (current_function_decl);
5820 if (block == 0)
5821 return;
5823 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5824 depth-first order. */
5825 block_vector = get_block_vector (block, &n_blocks);
5826 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5828 last_block_vector = identify_blocks_1 (get_insns (),
5829 block_vector + 1,
5830 block_vector + n_blocks,
5831 block_stack);
5833 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5834 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5835 if (0 && last_block_vector != block_vector + n_blocks)
5836 abort ();
5838 free (block_vector);
5839 free (block_stack);
5842 /* Subroutine of identify_blocks. Do the block substitution on the
5843 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5845 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5846 BLOCK_VECTOR is incremented for each block seen. */
5848 static tree *
5849 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5850 rtx insns;
5851 tree *block_vector;
5852 tree *end_block_vector;
5853 tree *orig_block_stack;
5855 rtx insn;
5856 tree *block_stack = orig_block_stack;
5858 for (insn = insns; insn; insn = NEXT_INSN (insn))
5860 if (GET_CODE (insn) == NOTE)
5862 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5864 tree b;
5866 /* If there are more block notes than BLOCKs, something
5867 is badly wrong. */
5868 if (block_vector == end_block_vector)
5869 abort ();
5871 b = *block_vector++;
5872 NOTE_BLOCK (insn) = b;
5873 *block_stack++ = b;
5875 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5877 /* If there are more NOTE_INSN_BLOCK_ENDs than
5878 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5879 if (block_stack == orig_block_stack)
5880 abort ();
5882 NOTE_BLOCK (insn) = *--block_stack;
5885 else if (GET_CODE (insn) == CALL_INSN
5886 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5888 rtx cp = PATTERN (insn);
5890 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5891 end_block_vector, block_stack);
5892 if (XEXP (cp, 1))
5893 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5894 end_block_vector, block_stack);
5895 if (XEXP (cp, 2))
5896 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5897 end_block_vector, block_stack);
5901 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5902 something is badly wrong. */
5903 if (block_stack != orig_block_stack)
5904 abort ();
5906 return block_vector;
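/* [Editorial sketch, not part of function.c]  The bracket matching that
   identify_blocks_1 performs on BLOCK_BEG/BLOCK_END notes, reduced to
   characters and kept under #if 0.  A pop from an empty stack, or a
   non-empty stack at the end, corresponds to the aborts above.  */
#if 0
#include <assert.h>
#include <stdio.h>

int
main (void)
{
  const char *notes = "(()())";	/* BEG BEG END BEG END END */
  int stack[16], depth = 0, next_block = 0;
  const char *p;

  for (p = notes; *p; p++)
    if (*p == '(')
      stack[depth++] = next_block++;	/* NOTE_INSN_BLOCK_BEG */
    else
      {
	assert (depth > 0);		/* more ENDs than BEGs */
	printf ("close block %d\n", stack[--depth]);
      }
  assert (depth == 0);			/* more BEGs than ENDs */
  return 0;
}
#endif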
5909 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5910 and create duplicate blocks. */
5911 /* ??? Need an option to either create block fragments or to create
5912 abstract origin duplicates of a source block. It really depends
5913 on what optimization has been performed. */
5915 void
5916 reorder_blocks ()
5918 tree block = DECL_INITIAL (current_function_decl);
5919 varray_type block_stack;
5921 if (block == NULL_TREE)
5922 return;
5924 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5926 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5927 reorder_blocks_0 (block);
5929 /* Prune the old trees away, so that they don't get in the way. */
5930 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5931 BLOCK_CHAIN (block) = NULL_TREE;
5933 /* Recreate the block tree from the note nesting. */
5934 reorder_blocks_1 (get_insns (), block, &block_stack);
5935 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5937 /* Remove deleted blocks from the block fragment chains. */
5938 reorder_fix_fragments (block);
5940 VARRAY_FREE (block_stack);
5943 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5945 static void
5946 reorder_blocks_0 (block)
5947 tree block;
5949 while (block)
5951 TREE_ASM_WRITTEN (block) = 0;
5952 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5953 block = BLOCK_CHAIN (block);
5957 static void
5958 reorder_blocks_1 (insns, current_block, p_block_stack)
5959 rtx insns;
5960 tree current_block;
5961 varray_type *p_block_stack;
5963 rtx insn;
5965 for (insn = insns; insn; insn = NEXT_INSN (insn))
5967 if (GET_CODE (insn) == NOTE)
5969 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5971 tree block = NOTE_BLOCK (insn);
5973 /* If we have seen this block before, that means it now
5974 spans multiple address regions. Create a new fragment. */
5975 if (TREE_ASM_WRITTEN (block))
5977 tree new_block = copy_node (block);
5978 tree origin;
5980 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5981 ? BLOCK_FRAGMENT_ORIGIN (block)
5982 : block);
5983 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5984 BLOCK_FRAGMENT_CHAIN (new_block)
5985 = BLOCK_FRAGMENT_CHAIN (origin);
5986 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5988 NOTE_BLOCK (insn) = new_block;
5989 block = new_block;
5992 BLOCK_SUBBLOCKS (block) = 0;
5993 TREE_ASM_WRITTEN (block) = 1;
5994 BLOCK_SUPERCONTEXT (block) = current_block;
5995 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5996 BLOCK_SUBBLOCKS (current_block) = block;
5997 current_block = block;
5998 VARRAY_PUSH_TREE (*p_block_stack, block);
6000 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6002 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
6003 VARRAY_POP (*p_block_stack);
6004 BLOCK_SUBBLOCKS (current_block)
6005 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
6006 current_block = BLOCK_SUPERCONTEXT (current_block);
6009 else if (GET_CODE (insn) == CALL_INSN
6010 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6012 rtx cp = PATTERN (insn);
6013 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
6014 if (XEXP (cp, 1))
6015 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
6016 if (XEXP (cp, 2))
6017 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
6022 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
6023 appears in the block tree, select one of the fragments to become
6024 the new origin block. */
6026 static void
6027 reorder_fix_fragments (block)
6028 tree block;
6030 while (block)
6032 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6033 tree new_origin = NULL_TREE;
6035 if (dup_origin)
6037 if (! TREE_ASM_WRITTEN (dup_origin))
6039 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6041 /* Find the first of the remaining fragments. There must
6042 be at least one -- the current block. */
6043 while (! TREE_ASM_WRITTEN (new_origin))
6044 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6045 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6048 else if (! dup_origin)
6049 new_origin = block;
6051 /* Re-root the rest of the fragments to the new origin. In the
6052 case that DUP_ORIGIN was null, that means BLOCK was the origin
6053 of a chain of fragments and we want to remove those fragments
6054 that didn't make it to the output. */
6055 if (new_origin)
6057 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6058 tree chain = *pp;
6060 while (chain)
6062 if (TREE_ASM_WRITTEN (chain))
6064 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6065 *pp = chain;
6066 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6068 chain = BLOCK_FRAGMENT_CHAIN (chain);
6070 *pp = NULL_TREE;
6073 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6074 block = BLOCK_CHAIN (block);
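/* [Editorial sketch, not part of function.c]  The pointer-to-pointer
   splicing used above to drop unmarked fragments, rerun on a toy chain
   and kept under #if 0.  PP always addresses the link field that should
   receive the next surviving node.  */
#if 0
#include <stdio.h>

struct frag { int written; struct frag *chain; };

int
main (void)
{
  struct frag c = { 1, 0 }, b = { 0, &c }, a = { 1, &b };
  struct frag *head = &a, **pp = &head, *chain = head;

  while (chain)
    {
      if (chain->written)	/* keep fragments that reached the output */
	{
	  *pp = chain;
	  pp = &chain->chain;
	}
      chain = chain->chain;
    }
  *pp = 0;			/* terminate the filtered chain */

  for (chain = head; chain; chain = chain->chain)
    printf ("kept one fragment\n");	/* prints twice: A and C survive */
  return 0;
}
#endif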
6078 /* Reverse the order of elements in the chain T of blocks,
6079 and return the new head of the chain (old last element). */
6081 static tree
6082 blocks_nreverse (t)
6083 tree t;
6085 tree prev = 0, decl, next;
6086 for (decl = t; decl; decl = next)
6088 next = BLOCK_CHAIN (decl);
6089 BLOCK_CHAIN (decl) = prev;
6090 prev = decl;
6092 return prev;
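/* [Editorial sketch, not part of function.c]  The three-pointer in-place
   reversal that blocks_nreverse applies to BLOCK_CHAIN links, rerun on a
   toy list and kept under #if 0.  */
#if 0
#include <stdio.h>

struct node { int id; struct node *chain; };

static struct node *
toy_nreverse (struct node *t)
{
  struct node *prev = 0, *next;
  for (; t; t = next)
    {
      next = t->chain;		/* save the rest of the chain */
      t->chain = prev;		/* point this node backwards */
      prev = t;
    }
  return prev;			/* old last element, new head */
}

int
main (void)
{
  struct node c = { 3, 0 }, b = { 2, &c }, a = { 1, &b };
  struct node *p;

  for (p = toy_nreverse (&a); p; p = p->chain)
    printf ("%d ", p->id);	/* prints 3 2 1 */
  printf ("\n");
  return 0;
}
#endif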
6095 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6096 non-NULL, list them all into VECTOR, in a depth-first preorder
6097 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6098 blocks. */
6100 static int
6101 all_blocks (block, vector)
6102 tree block;
6103 tree *vector;
6105 int n_blocks = 0;
6107 while (block)
6109 TREE_ASM_WRITTEN (block) = 0;
6111 /* Record this block. */
6112 if (vector)
6113 vector[n_blocks] = block;
6115 ++n_blocks;
6117 /* Record the subblocks, and their subblocks... */
6118 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6119 vector ? vector + n_blocks : 0);
6120 block = BLOCK_CHAIN (block);
6123 return n_blocks;
6126 /* Return a vector containing all the blocks rooted at BLOCK. The
6127 number of elements in the vector is stored in N_BLOCKS_P. The
6128 vector is dynamically allocated; it is the caller's responsibility
6129 to call `free' on the pointer returned. */
6131 static tree *
6132 get_block_vector (block, n_blocks_p)
6133 tree block;
6134 int *n_blocks_p;
6136 tree *block_vector;
6138 *n_blocks_p = all_blocks (block, NULL);
6139 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6140 all_blocks (block, block_vector);
6142 return block_vector;
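/* [Editorial sketch, not part of function.c]  The two-pass count-then-fill
   idiom of all_blocks/get_block_vector, rerun on a toy block tree and kept
   under #if 0.  The same walk runs twice: once with a null vector to size
   the allocation, once to record the nodes in depth-first preorder.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct blk { struct blk *sub, *chain; };

static int
walk (struct blk *b, struct blk **vec)
{
  int n = 0;
  while (b)
    {
      if (vec)
	vec[n] = b;		/* record this block */
      ++n;
      n += walk (b->sub, vec ? vec + n : 0);
      b = b->chain;
    }
  return n;
}

int
main (void)
{
  struct blk leaf = { 0, 0 }, root = { &leaf, 0 };
  int n = walk (&root, 0);			/* first pass: count */
  struct blk **vec = malloc (n * sizeof *vec);	/* stand-in for xmalloc */

  walk (&root, vec);				/* second pass: fill */
  printf ("%d blocks\n", n);			/* prints 2 */
  free (vec);
  return 0;
}
#endif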
6145 static int next_block_index = 2;
6147 /* Set BLOCK_NUMBER for all the blocks in FN. */
6149 void
6150 number_blocks (fn)
6151 tree fn;
6153 int i;
6154 int n_blocks;
6155 tree *block_vector;
6157 /* For SDB and XCOFF debugging output, we start numbering the blocks
6158 from 1 within each function, rather than keeping a running
6159 count. */
6160 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6161 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6162 next_block_index = 1;
6163 #endif
6165 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6167 /* The top-level BLOCK isn't numbered at all. */
6168 for (i = 1; i < n_blocks; ++i)
6169 /* We number the blocks from two. */
6170 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6172 free (block_vector);
6174 return;
6177 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6179 tree
6180 debug_find_var_in_block_tree (var, block)
6181 tree var;
6182 tree block;
6184 tree t;
6186 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6187 if (t == var)
6188 return block;
6190 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6192 tree ret = debug_find_var_in_block_tree (var, t);
6193 if (ret)
6194 return ret;
6197 return NULL_TREE;
6200 /* Allocate a function structure and reset its contents to the defaults. */
6202 static void
6203 prepare_function_start ()
6205 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6207 init_stmt_for_function ();
6208 init_eh_for_function ();
6210 cse_not_expected = ! optimize;
6212 /* Caller save not needed yet. */
6213 caller_save_needed = 0;
6215 /* No stack slots have been made yet. */
6216 stack_slot_list = 0;
6218 current_function_has_nonlocal_label = 0;
6219 current_function_has_nonlocal_goto = 0;
6221 /* There is no stack slot for handling nonlocal gotos. */
6222 nonlocal_goto_handler_slots = 0;
6223 nonlocal_goto_stack_level = 0;
6225 /* No labels have been declared for nonlocal use. */
6226 nonlocal_labels = 0;
6227 nonlocal_goto_handler_labels = 0;
6229 /* No function calls so far in this function. */
6230 function_call_count = 0;
6232 /* No parm regs have been allocated.
6233 (This is important for output_inline_function.) */
6234 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6236 /* Initialize the RTL mechanism. */
6237 init_emit ();
6239 /* Initialize the queue of pending postincrement and postdecrements,
6240 and some other info in expr.c. */
6241 init_expr ();
6243 /* We haven't done register allocation yet. */
6244 reg_renumber = 0;
6246 init_varasm_status (cfun);
6248 /* Clear out data used for inlining. */
6249 cfun->inlinable = 0;
6250 cfun->original_decl_initial = 0;
6251 cfun->original_arg_vector = 0;
6253 cfun->stack_alignment_needed = STACK_BOUNDARY;
6254 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6256 /* Set if a call to setjmp is seen. */
6257 current_function_calls_setjmp = 0;
6259 /* Set if a call to longjmp is seen. */
6260 current_function_calls_longjmp = 0;
6262 current_function_calls_alloca = 0;
6263 current_function_contains_functions = 0;
6264 current_function_is_leaf = 0;
6265 current_function_nothrow = 0;
6266 current_function_sp_is_unchanging = 0;
6267 current_function_uses_only_leaf_regs = 0;
6268 current_function_has_computed_jump = 0;
6269 current_function_is_thunk = 0;
6271 current_function_returns_pcc_struct = 0;
6272 current_function_returns_struct = 0;
6273 current_function_epilogue_delay_list = 0;
6274 current_function_uses_const_pool = 0;
6275 current_function_uses_pic_offset_table = 0;
6276 current_function_cannot_inline = 0;
6278 /* We have not yet needed to make a label to jump to for tail-recursion. */
6279 tail_recursion_label = 0;
6281 /* We haven't had a need to make a save area for ap yet. */
6282 arg_pointer_save_area = 0;
6284 /* No stack slots allocated yet. */
6285 frame_offset = 0;
6287 /* No SAVE_EXPRs in this function yet. */
6288 save_expr_regs = 0;
6290 /* No RTL_EXPRs in this function yet. */
6291 rtl_expr_chain = 0;
6293 /* Set up to allocate temporaries. */
6294 init_temp_slots ();
6296 /* Indicate that we need to distinguish between the return value of the
6297 present function and the return value of a function being called. */
6298 rtx_equal_function_value_matters = 1;
6300 /* Indicate that we have not instantiated virtual registers yet. */
6301 virtuals_instantiated = 0;
6303 /* Indicate that we want CONCATs now. */
6304 generating_concat_p = 1;
6306 /* Indicate we have no need of a frame pointer yet. */
6307 frame_pointer_needed = 0;
6309 /* By default assume not varargs or stdarg. */
6310 current_function_varargs = 0;
6311 current_function_stdarg = 0;
6313 /* We haven't made any trampolines for this function yet. */
6314 trampoline_list = 0;
6316 init_pending_stack_adjust ();
6317 inhibit_defer_pop = 0;
6319 current_function_outgoing_args_size = 0;
6321 cfun->arc_profile = profile_arc_flag || flag_test_coverage;
6325 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6327 (*lang_hooks.function.init) (cfun);
6328 if (init_machine_status)
6329 (*init_machine_status) (cfun);
6332 /* Initialize the rtl expansion mechanism so that we can do simple things
6333 like generate sequences. This is used to provide a context during global
6334 initialization of some passes. */
6335 void
6336 init_dummy_function_start ()
6338 prepare_function_start ();
6341 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6342 and initialize static variables for generating RTL for the statements
6343 of the function. */
6345 void
6346 init_function_start (subr, filename, line)
6347 tree subr;
6348 const char *filename;
6349 int line;
6351 prepare_function_start ();
6353 current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
6354 cfun->decl = subr;
6356 /* Nonzero if this is a nested function that uses a static chain. */
6358 current_function_needs_context
6359 = (decl_function_context (current_function_decl) != 0
6360 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6362 /* Within function body, compute a type's size as soon it is laid out. */
6363 immediate_size_expand++;
6365 /* Prevent ever trying to delete the first instruction of a function.
6366 Also tell final how to output a linenum before the function prologue.
6367 Note linenums could be missing, e.g. when compiling a Java .class file. */
6368 if (line > 0)
6369 emit_line_note (filename, line);
6371 /* Make sure first insn is a note even if we don't want linenums.
6372 This makes sure the first insn will never be deleted.
6373 Also, final expects a note to appear there. */
6374 emit_note (NULL, NOTE_INSN_DELETED);
6376 /* Set flags used by final.c. */
6377 if (aggregate_value_p (DECL_RESULT (subr)))
6379 #ifdef PCC_STATIC_STRUCT_RETURN
6380 current_function_returns_pcc_struct = 1;
6381 #endif
6382 current_function_returns_struct = 1;
6385 /* Warn if this value is an aggregate type,
6386 regardless of which calling convention we are using for it. */
6387 if (warn_aggregate_return
6388 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6389 warning ("function returns an aggregate");
6391 current_function_returns_pointer
6392 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6395 /* Make sure all values used by the optimization passes have sane
6396 defaults. */
6397 void
6398 init_function_for_compilation ()
6400 reg_renumber = 0;
6402 /* No prologue/epilogue insns yet. */
6403 VARRAY_GROW (prologue, 0);
6404 VARRAY_GROW (epilogue, 0);
6405 VARRAY_GROW (sibcall_epilogue, 0);
6408 /* Indicate that the current function uses extra args
6409 not explicitly mentioned in the argument list in any fashion. */
6411 void
6412 mark_varargs ()
6414 current_function_varargs = 1;
6417 /* Expand a call to __main at the beginning of a possible main function. */
6419 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6420 #undef HAS_INIT_SECTION
6421 #define HAS_INIT_SECTION
6422 #endif
6424 void
6425 expand_main_function ()
6427 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6428 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6430 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6431 rtx tmp, seq;
6433 start_sequence ();
6434 /* Forcibly align the stack. */
6435 #ifdef STACK_GROWS_DOWNWARD
6436 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
6437 stack_pointer_rtx, 1, OPTAB_WIDEN);
6438 #else
6439 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6440 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6441 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6442 stack_pointer_rtx, 1, OPTAB_WIDEN);
6443 #endif
6444 if (tmp != stack_pointer_rtx)
6445 emit_move_insn (stack_pointer_rtx, tmp);
6447 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6448 tmp = force_reg (Pmode, const0_rtx);
6449 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6450 seq = gen_sequence ();
6451 end_sequence ();
6453 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6454 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6455 break;
6456 if (tmp)
6457 emit_insn_before (seq, tmp);
6458 else
6459 emit_insn (seq);
6461 #endif
6463 #ifndef HAS_INIT_SECTION
6464 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6465 VOIDmode, 0);
6466 #endif
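/* [Editorial sketch, not part of function.c]  Why a bare AND suffices in
   expand_main_function above when the stack grows downward, kept under
   #if 0: clearing the low bits can only decrease SP, i.e. allocate and
   never release stack, so no preliminary addition is needed.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long sp = 0x7fff1234;
  unsigned long align = 16;	/* PREFERRED_STACK_BOUNDARY in bytes (assumed) */

  printf ("0x%lx -> 0x%lx\n", sp, sp & -align);	/* -> 0x7fff1230 */
  return 0;
}
#endif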
6469 extern struct obstack permanent_obstack;
6471 /* The PENDING_SIZES represent the sizes of variable-sized types.
6472 Create RTL for the various sizes now (using temporary variables),
6473 so that we can refer to the sizes from the RTL we are generating
6474 for the current function. The PENDING_SIZES are a TREE_LIST. The
6475 TREE_VALUE of each node is a SAVE_EXPR. */
6477 void
6478 expand_pending_sizes (pending_sizes)
6479 tree pending_sizes;
6481 tree tem;
6483 /* Evaluate now the sizes of any types declared among the arguments. */
6484 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6486 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6487 /* Flush the queue in case this parameter declaration has
6488 side-effects. */
6489 emit_queue ();
6493 /* Start the RTL for a new function, and set variables used for
6494 emitting RTL.
6495 SUBR is the FUNCTION_DECL node.
6496 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6497 the function's parameters, which must be run at any return statement. */
6499 void
6500 expand_function_start (subr, parms_have_cleanups)
6501 tree subr;
6502 int parms_have_cleanups;
6504 tree tem;
6505 rtx last_ptr = NULL_RTX;
6507 /* Make sure volatile mem refs aren't considered
6508 valid operands of arithmetic insns. */
6509 init_recog_no_volatile ();
6511 current_function_instrument_entry_exit
6512 = (flag_instrument_function_entry_exit
6513 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6515 current_function_profile
6516 = (profile_flag
6517 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6519 current_function_limit_stack
6520 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6522 /* If function gets a static chain arg, store it in the stack frame.
6523 Do this first, so it gets the first stack slot offset. */
6524 if (current_function_needs_context)
6526 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6528 /* Delay copying static chain if it is not a register to avoid
6529 conflicts with regs used for parameters. */
6530 if (! SMALL_REGISTER_CLASSES
6531 || GET_CODE (static_chain_incoming_rtx) == REG)
6532 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6535 /* If the parameters of this function need cleaning up, get a label
6536 for the beginning of the code which executes those cleanups. This must
6537 be done before doing anything with return_label. */
6538 if (parms_have_cleanups)
6539 cleanup_label = gen_label_rtx ();
6540 else
6541 cleanup_label = 0;
6543 /* Make the label for return statements to jump to. Do not special
6544 case machines with special return instructions -- they will be
6545 handled later during jump, ifcvt, or epilogue creation. */
6546 return_label = gen_label_rtx ();
6548 /* Initialize rtx used to return the value. */
6549 /* Do this before assign_parms so that we copy the struct value address
6550 before any library calls that assign parms might generate. */
6552 /* Decide whether to return the value in memory or in a register. */
6553 if (aggregate_value_p (DECL_RESULT (subr)))
6555 /* Returning something that won't go in a register. */
6556 rtx value_address = 0;
6558 #ifdef PCC_STATIC_STRUCT_RETURN
6559 if (current_function_returns_pcc_struct)
6561 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6562 value_address = assemble_static_space (size);
6564 else
6565 #endif
6567 /* Expect to be passed the address of a place to store the value.
6568 If it is passed as an argument, assign_parms will take care of
6569 it. */
6570 if (struct_value_incoming_rtx)
6572 value_address = gen_reg_rtx (Pmode);
6573 emit_move_insn (value_address, struct_value_incoming_rtx);
6576 if (value_address)
6578 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6579 set_mem_attributes (x, DECL_RESULT (subr), 1);
6580 SET_DECL_RTL (DECL_RESULT (subr), x);
6583 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6584 /* If return mode is void, this decl rtl should not be used. */
6585 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6586 else
6588 /* Compute the return values into a pseudo reg, which we will copy
6589 into the true return register after the cleanups are done. */
6591 /* In order to figure out what mode to use for the pseudo, we
6592 figure out what the mode of the eventual return register will
6593 actually be, and use that. */
6594 rtx hard_reg
6595 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6596 subr, 1);
6598 /* Structures that are returned in registers are not aggregate_value_p,
6599 so we may see a PARALLEL. Don't play pseudo games with this. */
6600 if (! REG_P (hard_reg))
6601 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6602 else
6604 /* Create the pseudo. */
6605 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6607 /* Needed because we may need to move this to memory
6608 in case it's a named return value whose address is taken. */
6609 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6613 /* Initialize rtx for parameters and local variables.
6614 In some cases this requires emitting insns. */
6616 assign_parms (subr);
6618 /* Copy the static chain now if it wasn't a register. The delay is to
6619 avoid conflicts with the parameter passing registers. */
6621 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6622 if (GET_CODE (static_chain_incoming_rtx) != REG)
6623 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6625 /* The following was moved from init_function_start.
6626 The move is supposed to make sdb output more accurate. */
6627 /* Indicate the beginning of the function body,
6628 as opposed to parm setup. */
6629 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6631 if (GET_CODE (get_last_insn ()) != NOTE)
6632 emit_note (NULL, NOTE_INSN_DELETED);
6633 parm_birth_insn = get_last_insn ();
6635 context_display = 0;
6636 if (current_function_needs_context)
6638 /* Fetch static chain values for containing functions. */
6639 tem = decl_function_context (current_function_decl);
6640 /* Copy the static chain pointer into a pseudo. If we have
6641 small register classes, copy the value from memory if
6642 static_chain_incoming_rtx is a REG. */
6643 if (tem)
6645 /* If the static chain originally came in a register, put it back
6646 there, then move it out in the next insn. The reason for
6647 this peculiar code is to satisfy function integration. */
6648 if (SMALL_REGISTER_CLASSES
6649 && GET_CODE (static_chain_incoming_rtx) == REG)
6650 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6651 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6654 while (tem)
6656 tree rtlexp = make_node (RTL_EXPR);
6658 RTL_EXPR_RTL (rtlexp) = last_ptr;
6659 context_display = tree_cons (tem, rtlexp, context_display);
6660 tem = decl_function_context (tem);
6661 if (tem == 0)
6662 break;
6663 /* Chain thru stack frames, assuming pointer to next lexical frame
6664 is found at the place we always store it. */
6665 #ifdef FRAME_GROWS_DOWNWARD
6666 last_ptr = plus_constant (last_ptr,
6667 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6668 #endif
6669 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6670 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6671 last_ptr = copy_to_reg (last_ptr);
6673 /* If we are not optimizing, ensure that we know that this
6674 piece of context is live over the entire function. */
6675 if (! optimize)
6676 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6677 save_expr_regs);
6681 if (current_function_instrument_entry_exit)
6683 rtx fun = DECL_RTL (current_function_decl);
6684 if (GET_CODE (fun) == MEM)
6685 fun = XEXP (fun, 0);
6686 else
6687 abort ();
6688 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6689 2, fun, Pmode,
6690 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6691 0,
6692 hard_frame_pointer_rtx),
6693 Pmode);
6696 if (current_function_profile)
6698 current_function_profile_label_no = profile_label_no++;
6699 #ifdef PROFILE_HOOK
6700 PROFILE_HOOK (current_function_profile_label_no);
6701 #endif
6704 /* After the display initializations is where the tail-recursion label
6705 should go, if we end up needing one. Ensure we have a NOTE here
6706 since some things (like trampolines) get placed before this. */
6707 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6709 /* Evaluate now the sizes of any types declared among the arguments. */
6710 expand_pending_sizes (nreverse (get_pending_sizes ()));
6712 /* Make sure there is a line number after the function entry setup code. */
6713 force_next_line_note ();
6716 /* Undo the effects of init_dummy_function_start. */
6717 void
6718 expand_dummy_function_end ()
6720 /* End any sequences that failed to be closed due to syntax errors. */
6721 while (in_sequence_p ())
6722 end_sequence ();
6724 /* Outside function body, can't compute type's actual size
6725 until next function's body starts. */
6727 free_after_parsing (cfun);
6728 free_after_compilation (cfun);
6729 cfun = 0;
6732 /* Call DOIT for each hard register used as a return value from
6733 the current function. */
6735 void
6736 diddle_return_value (doit, arg)
6737 void (*doit) PARAMS ((rtx, void *));
6738 void *arg;
6740 rtx outgoing = current_function_return_rtx;
6742 if (! outgoing)
6743 return;
6745 if (GET_CODE (outgoing) == REG)
6746 (*doit) (outgoing, arg);
6747 else if (GET_CODE (outgoing) == PARALLEL)
6749 int i;
6751 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6753 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6755 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6756 (*doit) (x, arg);
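/* [Editorial sketch, not part of function.c]  The callback shape of
   diddle_return_value, kept under #if 0.  One walker finds the hard
   return registers (here just integers); what to do with each one is
   passed in, as with do_clobber_return_reg and do_use_return_reg
   below.  */
#if 0
#include <stdio.h>

static void
diddle (const int *regs, int n, void (*doit) (int, void *), void *arg)
{
  int i;
  for (i = 0; i < n; i++)
    (*doit) (regs[i], arg);
}

static void
print_reg (int reg, void *arg)
{
  printf ("%s r%d\n", (const char *) arg, reg);
}

int
main (void)
{
  int return_regs[] = { 0, 1 };	/* stand-in for a PARALLEL of REGs */

  diddle (return_regs, 2, print_reg, "clobber");
  diddle (return_regs, 2, print_reg, "use");
  return 0;
}
#endif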
6761 static void
6762 do_clobber_return_reg (reg, arg)
6763 rtx reg;
6764 void *arg ATTRIBUTE_UNUSED;
6766 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6769 void
6770 clobber_return_register ()
6772 diddle_return_value (do_clobber_return_reg, NULL);
6774 /* In case we do use pseudo to return value, clobber it too. */
6775 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6777 tree decl_result = DECL_RESULT (current_function_decl);
6778 rtx decl_rtl = DECL_RTL (decl_result);
6779 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6781 do_clobber_return_reg (decl_rtl, NULL);
6786 static void
6787 do_use_return_reg (reg, arg)
6788 rtx reg;
6789 void *arg ATTRIBUTE_UNUSED;
6791 emit_insn (gen_rtx_USE (VOIDmode, reg));
6794 void
6795 use_return_register ()
6797 diddle_return_value (do_use_return_reg, NULL);
6800 /* Generate RTL for the end of the current function.
6801 FILENAME and LINE are the current position in the source file.
6803 It is up to language-specific callers to do cleanups for parameters--
6804 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6806 void
6807 expand_function_end (filename, line, end_bindings)
6808 const char *filename;
6809 int line;
6810 int end_bindings;
6812 tree link;
6813 rtx clobber_after;
6815 #ifdef TRAMPOLINE_TEMPLATE
6816 static rtx initial_trampoline;
6817 #endif
6819 finish_expr_for_function ();
6821 /* If arg_pointer_save_area was referenced only from a nested
6822 function, we will not have initialized it yet. Do that now. */
6823 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6824 get_arg_pointer_save_area (cfun);
6826 #ifdef NON_SAVING_SETJMP
6827 /* Don't put any variables in registers if we call setjmp
6828 on a machine that fails to restore the registers. */
6829 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6831 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6832 setjmp_protect (DECL_INITIAL (current_function_decl));
6834 setjmp_protect_args ();
6836 #endif
6838 /* Initialize any trampolines required by this function. */
6839 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6841 tree function = TREE_PURPOSE (link);
6842 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6843 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6844 #ifdef TRAMPOLINE_TEMPLATE
6845 rtx blktramp;
6846 #endif
6847 rtx seq;
6849 #ifdef TRAMPOLINE_TEMPLATE
6850 /* First make sure this compilation has a template for
6851 initializing trampolines. */
6852 if (initial_trampoline == 0)
6854 initial_trampoline
6855 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6856 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6858 ggc_add_rtx_root (&initial_trampoline, 1);
6860 #endif
6862 /* Generate insns to initialize the trampoline. */
6863 start_sequence ();
6864 tramp = round_trampoline_addr (XEXP (tramp, 0));
6865 #ifdef TRAMPOLINE_TEMPLATE
6866 blktramp = replace_equiv_address (initial_trampoline, tramp);
6867 emit_block_move (blktramp, initial_trampoline,
6868 GEN_INT (TRAMPOLINE_SIZE));
6869 #endif
6870 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6871 seq = get_insns ();
6872 end_sequence ();
6874 /* Put those insns at entry to the containing function (this one). */
6875 emit_insns_before (seq, tail_recursion_reentry);
6878 /* If we are doing stack checking and this function makes calls,
6879 do a stack probe at the start of the function to ensure we have enough
6880 space for another stack frame. */
6881 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6883 rtx insn, seq;
6885 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6886 if (GET_CODE (insn) == CALL_INSN)
6888 start_sequence ();
6889 probe_stack_range (STACK_CHECK_PROTECT,
6890 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6891 seq = get_insns ();
6892 end_sequence ();
6893 emit_insns_before (seq, tail_recursion_reentry);
6894 break;
6898 /* Warn about unused parms if extra warnings were specified. */
6899 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6900 warning. WARN_UNUSED_PARAMETER is negative when set by
6901 -Wunused. */
6902 if (warn_unused_parameter > 0
6903 || (warn_unused_parameter < 0 && extra_warnings))
6905 tree decl;
6907 for (decl = DECL_ARGUMENTS (current_function_decl);
6908 decl; decl = TREE_CHAIN (decl))
6909 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6910 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6911 warning_with_decl (decl, "unused parameter `%s'");
6914 /* Delete handlers for nonlocal gotos if nothing uses them. */
6915 if (nonlocal_goto_handler_slots != 0
6916 && ! current_function_has_nonlocal_label)
6917 delete_handlers ();
6919 /* End any sequences that failed to be closed due to syntax errors. */
6920 while (in_sequence_p ())
6921 end_sequence ();
6923 /* Outside function body, can't compute type's actual size
6924 until next function's body starts. */
6925 immediate_size_expand--;
6927 clear_pending_stack_adjust ();
6928 do_pending_stack_adjust ();
6930 /* Mark the end of the function body.
6931 If control reaches this insn, the function can drop through
6932 without returning a value. */
6933 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6935 /* Must mark the last line number note in the function, so that the test
6936 coverage code can avoid counting the last line twice. This just tells
6937 the code to ignore the immediately following line note, since there
6938 already exists a copy of this note somewhere above. This line number
6939 note is still needed for debugging though, so we can't delete it. */
6940 if (flag_test_coverage)
6941 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6943 /* Output a linenumber for the end of the function.
6944 SDB depends on this. */
6945 emit_line_note_force (filename, line);
6947 /* Before the return label (if any), clobber the return
6948 registers so that they are not propagated live to the rest of
6949 the function. This can only happen with functions that drop
6950 through; if there had been a return statement, there would
6951 have either been a return rtx, or a jump to the return label.
6953 We delay actual code generation until after current_function_return_rtx
6954 is computed. */
6955 clobber_after = get_last_insn ();
6957 /* Output the label for the actual return from the function,
6958 if one is expected. This happens either because a function epilogue
6959 is used instead of a return instruction, or because a return was done
6960 with a goto in order to run local cleanups, or because of pcc-style
6961 structure returning. */
6962 if (return_label)
6963 emit_label (return_label);
6965 /* C++ uses this. */
6966 if (end_bindings)
6967 expand_end_bindings (0, 0, 0);
6969 if (current_function_instrument_entry_exit)
6971 rtx fun = DECL_RTL (current_function_decl);
6972 if (GET_CODE (fun) == MEM)
6973 fun = XEXP (fun, 0);
6974 else
6975 abort ();
6976 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6977 2, fun, Pmode,
6978 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6979 0,
6980 hard_frame_pointer_rtx),
6981 Pmode);
6984 /* Let except.c know where it should emit the call to unregister
6985 the function context for sjlj exceptions. */
6986 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6987 sjlj_emit_function_exit_after (get_last_insn ());
6989 /* If we had calls to alloca, and this machine needs
6990 an accurate stack pointer to exit the function,
6991 insert some code to save and restore the stack pointer. */
6992 #ifdef EXIT_IGNORE_STACK
6993 if (! EXIT_IGNORE_STACK)
6994 #endif
6995 if (current_function_calls_alloca)
6997 rtx tem = 0;
6999 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
7000 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
7003 /* If scalar return value was computed in a pseudo-reg, or was a named
7004 return value that got dumped to the stack, copy that to the hard
7005 return register. */
7006 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
7008 tree decl_result = DECL_RESULT (current_function_decl);
7009 rtx decl_rtl = DECL_RTL (decl_result);
7011 if (REG_P (decl_rtl)
7012 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
7013 : DECL_REGISTER (decl_result))
7015 rtx real_decl_rtl = current_function_return_rtx;
7017 /* This should be set in assign_parms. */
7018 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
7019 abort ();
7021 /* If this is a BLKmode structure being returned in registers,
7022 then use the mode computed in expand_return. Note that if
7023 decl_rtl is memory, then its mode may have been changed,
7024 but that current_function_return_rtx has not. */
7025 if (GET_MODE (real_decl_rtl) == BLKmode)
7026 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
7028 /* If a named return value dumped decl_return to memory, then
7029 we may need to re-do the PROMOTE_MODE signed/unsigned
7030 extension. */
7031 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
7033 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
7035 #ifdef PROMOTE_FUNCTION_RETURN
7036 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
7037 &unsignedp, 1);
7038 #endif
7040 convert_move (real_decl_rtl, decl_rtl, unsignedp);
7042 else if (GET_CODE (real_decl_rtl) == PARALLEL)
7043 emit_group_load (real_decl_rtl, decl_rtl,
7044 int_size_in_bytes (TREE_TYPE (decl_result)));
7045 else
7046 emit_move_insn (real_decl_rtl, decl_rtl);
7050 /* If returning a structure, arrange to return the address of the value
7051 in a place where debuggers expect to find it.
7053 If returning a structure PCC style,
7054 the caller also depends on this value.
7055 And current_function_returns_pcc_struct is not necessarily set. */
7056 if (current_function_returns_struct
7057 || current_function_returns_pcc_struct)
7059 rtx value_address
7060 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7061 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7062 #ifdef FUNCTION_OUTGOING_VALUE
7063 rtx outgoing
7064 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7065 current_function_decl);
7066 #else
7067 rtx outgoing
7068 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7069 #endif
7071 /* Mark this as a function return value so integrate will delete the
7072 assignment and USE below when inlining this function. */
7073 REG_FUNCTION_VALUE_P (outgoing) = 1;
7075 #ifdef POINTERS_EXTEND_UNSIGNED
7076 /* The address may be ptr_mode and OUTGOING may be Pmode. */
7077 if (GET_MODE (outgoing) != GET_MODE (value_address))
7078 value_address = convert_memory_address (GET_MODE (outgoing),
7079 value_address);
7080 #endif
7082 emit_move_insn (outgoing, value_address);
7084 /* Show return register used to hold result (in this case the address
7085 of the result). */
7086 current_function_return_rtx = outgoing;
7089 /* If this is an implementation of throw, do what's necessary to
7090 communicate between __builtin_eh_return and the epilogue. */
7091 expand_eh_return ();
7093 /* Emit the actual code to clobber return register. */
7095 rtx seq, after;
7097 start_sequence ();
7098 clobber_return_register ();
7099 seq = gen_sequence ();
7100 end_sequence ();
7102 after = emit_insn_after (seq, clobber_after);
7104 if (clobber_after != after)
7105 cfun->x_clobber_return_insn = after;
7108 /* ??? This should no longer be necessary since stupid is no longer with
7109 us, but there are some parts of the compiler (eg reload_combine, and
7110 sh mach_dep_reorg) that still try and compute their own lifetime info
7111 instead of using the general framework. */
7112 use_return_register ();
7114 /* Fix up any gotos that jumped out to the outermost
7115 binding level of the function.
7116 Must follow emitting RETURN_LABEL. */
7118 /* If you have any cleanups to do at this point,
7119 and they need to create temporary variables,
7120 then you will lose. */
7121 expand_fixups (get_insns ());
7124 rtx
7125 get_arg_pointer_save_area (f)
7126 struct function *f;
7128 rtx ret = f->x_arg_pointer_save_area;
7130 if (! ret)
7132 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7133 f->x_arg_pointer_save_area = ret;
7136 if (f == cfun && ! f->arg_pointer_save_area_init)
7138 rtx seq;
7140 /* Save the arg pointer at the beginning of the function. The
7141 generated stack slot may not be a valid memory address, so we
7142 have to check it and fix it if necessary. */
7143 start_sequence ();
7144 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7145 seq = gen_sequence ();
7146 end_sequence ();
7148 push_topmost_sequence ();
7149 emit_insn_after (seq, get_insns ());
7150 pop_topmost_sequence ();
7153 return ret;
7156 /* Extend a vector that records the INSN_UIDs of INSNS (either a
7157 sequence or a single insn). */
7159 static void
7160 record_insns (insns, vecp)
7161 rtx insns;
7162 varray_type *vecp;
7164 if (GET_CODE (insns) == SEQUENCE)
7166 int len = XVECLEN (insns, 0);
7167 int i = VARRAY_SIZE (*vecp);
7169 VARRAY_GROW (*vecp, i + len);
7170 while (--len >= 0)
7172 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
7173 ++i;
7176 else
7178 int i = VARRAY_SIZE (*vecp);
7179 VARRAY_GROW (*vecp, i + 1);
7180 VARRAY_INT (*vecp, i) = INSN_UID (insns);
7184 /* Determine how many INSN_UIDs in VEC are part of INSN. */
7186 static int
7187 contains (insn, vec)
7188 rtx insn;
7189 varray_type vec;
7191 int i, j;
7193 if (GET_CODE (insn) == INSN
7194 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7196 int count = 0;
7197 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7198 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7199 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7200 count++;
7201 return count;
7203 else
7205 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7206 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7207 return 1;
7209 return 0;
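/* [Editorial sketch, not part of function.c]  The UID bookkeeping of
   record_insns/contains with a plain growable array standing in for the
   varray, kept under #if 0.  Insns are represented by their INSN_UIDs.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

static int *vec;
static int vec_len;

static void
record_uid (int uid)
{
  /* Grow by one and append, as VARRAY_GROW plus VARRAY_INT do above.  */
  vec = realloc (vec, (vec_len + 1) * sizeof *vec);
  vec[vec_len++] = uid;
}

static int
contains_uid (int uid)
{
  int j;
  for (j = vec_len - 1; j >= 0; --j)
    if (uid == vec[j])
      return 1;
  return 0;
}

int
main (void)
{
  record_uid (42);		/* e.g. a prologue insn's INSN_UID */
  printf ("%d %d\n", contains_uid (42), contains_uid (7));	/* 1 0 */
  free (vec);
  return 0;
}
#endif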
7212 int
7213 prologue_epilogue_contains (insn)
7214 rtx insn;
7216 if (contains (insn, prologue))
7217 return 1;
7218 if (contains (insn, epilogue))
7219 return 1;
7220 return 0;
7223 int
7224 sibcall_epilogue_contains (insn)
7225 rtx insn;
7227 if (sibcall_epilogue)
7228 return contains (insn, sibcall_epilogue);
7229 return 0;
7232 #ifdef HAVE_return
7233 /* Insert gen_return at the end of block BB. This also means updating
7234 block_for_insn appropriately. */
7236 static void
7237 emit_return_into_block (bb, line_note)
7238 basic_block bb;
7239 rtx line_note;
7241 rtx p, end;
7243 p = NEXT_INSN (bb->end);
7244 end = emit_jump_insn_after (gen_return (), bb->end);
7245 if (line_note)
7246 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7247 NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7249 #endif /* HAVE_return */
7251 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7253 /* These functions convert the epilogue into a variant that does not modify the
7254 stack pointer. This is used in cases where a function returns an object
7255 whose size is not known until it is computed. The called function leaves the
7256 object on the stack, leaves the stack depressed, and returns a pointer to
7257 the object.
7259 What we need to do is track all modifications and references to the stack
7260 pointer, deleting the modifications and changing the references to point to
7261 the location the stack pointer would have pointed to had the modifications
7262 taken place.
7264 These functions need to be portable so we need to make as few assumptions
7265 about the epilogue as we can. However, the epilogue basically contains
7266 three things: instructions to reset the stack pointer, instructions to
7267 reload registers, possibly including the frame pointer, and an
7268 instruction to return to the caller.
7270 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7271 We also make no attempt to validate the insns we make since if they are
7272 invalid, we probably can't do anything valid. The intent is that these
7273 routines get "smarter" as more and more machines start to use them and
7274 they try operating on different epilogues.
7276 We use the following structure to track what the part of the epilogue that
7277 we've already processed has done. We keep two copies of the SP equivalence,
7278 one for use during the insn we are processing and one for use in the next
7279 insn. The difference is because one part of a PARALLEL may adjust SP
7280 and the other may use it. */
7282 struct epi_info
7284 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7285 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7286 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7287 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7288 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7289 should be set to once we no longer need
7290 its value. */
7293 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7294 static void emit_equiv_load PARAMS ((struct epi_info *));
7296 /* Modify SEQ, a SEQUENCE that is part of the epilogue, so that it makes
7297 no modifications to the stack pointer. Return the new sequence. */
7299 static rtx
7300 keep_stack_depressed (seq)
7301 rtx seq;
7303 int i, j;
7304 struct epi_info info;
7306 /* If the epilogue is just a single instruction, it must be OK as is. */
7308 if (GET_CODE (seq) != SEQUENCE)
7309 return seq;
7311 /* Otherwise, start a sequence, initialize the information we have, and
7312 process all the insns we were given. */
7313 start_sequence ();
7315 info.sp_equiv_reg = stack_pointer_rtx;
7316 info.sp_offset = 0;
7317 info.equiv_reg_src = 0;
7319 for (i = 0; i < XVECLEN (seq, 0); i++)
7321 rtx insn = XVECEXP (seq, 0, i);
7323 if (!INSN_P (insn))
7325 add_insn (insn);
7326 continue;
7329 /* If this insn references the register that SP is equivalent to and
7330 we have a pending load to that register, we must force out the load
7331 first and then indicate we no longer know what SP's equivalent is. */
7332 if (info.equiv_reg_src != 0
7333 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7335 emit_equiv_load (&info);
7336 info.sp_equiv_reg = 0;
7339 info.new_sp_equiv_reg = info.sp_equiv_reg;
7340 info.new_sp_offset = info.sp_offset;
7342 /* If this is a (RETURN) and the return address is on the stack,
7343 update the address and change to an indirect jump. */
7344 if (GET_CODE (PATTERN (insn)) == RETURN
7345 || (GET_CODE (PATTERN (insn)) == PARALLEL
7346 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7348 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7349 rtx base = 0;
7350 HOST_WIDE_INT offset = 0;
7351 rtx jump_insn, jump_set;
7353 /* If the return address is in a register, we can emit the insn
7354 unchanged. Otherwise, it must be a MEM and we see what the
7355 base register and offset are. In any case, we have to emit any
7356 pending load to the equivalent reg of SP, if any. */
7357 if (GET_CODE (retaddr) == REG)
7359 emit_equiv_load (&info);
7360 add_insn (insn);
7361 continue;
7363 else if (GET_CODE (retaddr) == MEM
7364 && GET_CODE (XEXP (retaddr, 0)) == REG)
7365 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7366 else if (GET_CODE (retaddr) == MEM
7367 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7368 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7369 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7371 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7372 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7374 else
7375 abort ();
7377 /* If the base of the location containing the return pointer
7378 is SP, we must update it with the replacement address. Otherwise,
7379 just build the necessary MEM. */
7380 retaddr = plus_constant (base, offset);
7381 if (base == stack_pointer_rtx)
7382 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7383 plus_constant (info.sp_equiv_reg,
7384 info.sp_offset));
7386 retaddr = gen_rtx_MEM (Pmode, retaddr);
7388 /* If there is a pending load to the equivalent register for SP
7389 and we reference that register, we must load our address into
7390 a scratch register and then do that load. */
7391 if (info.equiv_reg_src
7392 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7394 unsigned int regno;
7395 rtx reg;
7397 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7398 if (HARD_REGNO_MODE_OK (regno, Pmode)
7399 && !fixed_regs[regno]
7400 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7401 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7402 regno)
7403 && !refers_to_regno_p (regno,
7404 regno + HARD_REGNO_NREGS (regno,
7405 Pmode),
7406 info.equiv_reg_src, NULL))
7407 break;
7409 if (regno == FIRST_PSEUDO_REGISTER)
7410 abort ();
7412 reg = gen_rtx_REG (Pmode, regno);
7413 emit_move_insn (reg, retaddr);
7414 retaddr = reg;
7417 emit_equiv_load (&info);
7418 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7420 /* Show the SET in the above insn is a RETURN. */
7421 jump_set = single_set (jump_insn);
7422 if (jump_set == 0)
7423 abort ();
7424 else
7425 SET_IS_RETURN_P (jump_set) = 1;
7428 /* If SP is not mentioned in the pattern and its equivalent register, if
7429 any, is not modified, just emit it. Otherwise, if neither is set,
7430 replace the reference to SP and emit the insn. If none of those are
7431 true, handle each SET individually. */
7432 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7433 && (info.sp_equiv_reg == stack_pointer_rtx
7434 || !reg_set_p (info.sp_equiv_reg, insn)))
7435 add_insn (insn);
7436 else if (! reg_set_p (stack_pointer_rtx, insn)
7437 && (info.sp_equiv_reg == stack_pointer_rtx
7438 || !reg_set_p (info.sp_equiv_reg, insn)))
7440 if (! validate_replace_rtx (stack_pointer_rtx,
7441 plus_constant (info.sp_equiv_reg,
7442 info.sp_offset),
7443 insn))
7444 abort ();
7446 add_insn (insn);
7448 else if (GET_CODE (PATTERN (insn)) == SET)
7449 handle_epilogue_set (PATTERN (insn), &info);
7450 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7452 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7453 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7454 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7456 else
7457 add_insn (insn);
7459 info.sp_equiv_reg = info.new_sp_equiv_reg;
7460 info.sp_offset = info.new_sp_offset;
7463 seq = gen_sequence ();
7464 end_sequence ();
7465 return seq;

/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (set, p)
     rtx set;
     struct epi_info *p;
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
	abort ();

      if (GET_CODE (SET_SRC (set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}

      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
	abort ();

      return;
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
	  || p->equiv_reg_src != 0)
	abort ();
      else
	p->equiv_reg_src
	  = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				  plus_constant (p->sp_equiv_reg,
						 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn with its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set),
					     stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}
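
/* To sketch the three cases above (hypothetical insns; offsets invented):

       (set (reg sp) (plus (reg sp) (const_int 8)))
	   records NEW_SP_OFFSET = SP_OFFSET + 8 and emits nothing;

       (set (reg fp) (mem (plus (reg sp) (const_int 4))))
	   where FP is SP's equivalent register, queues the load in
	   EQUIV_REG_SRC with SP rewritten to its tracked equivalence;

       (set (reg r2) (mem (reg sp)))
	   is emitted immediately with SP replaced by
	   (plus SP_EQUIV_REG SP_OFFSET).  */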

/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (p)
     struct epi_info *p;
{
  if (p->equiv_reg_src != 0)
    emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);

  p->equiv_reg_src = 0;
}
#endif

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */
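
/* After this pass, the insn stream around a typical function body looks
   roughly like this (an illustrative sketch; the actual insns depend on
   the target's prologue and epilogue patterns):

       prologue insns                  ; inserted on the entry edge
       (note NOTE_INSN_PROLOGUE_END)
       ... function body ...
       (note NOTE_INSN_EPILOGUE_BEG)
       epilogue insns                  ; inserted on the exit fallthru edge
       (jump_insn (return))  */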

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      record_insns (seq, &prologue);
      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      seq = gen_sequence ();
      end_sequence ();

      /* Can't deal with multiple successors of the entry block
	 at the moment.  Function should always have at least one
	 entry point.  */
      if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
	abort ();

      insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
	 then by definition we don't need a full epilogue.  Examine
	 the block that falls through to EXIT.  If it does not
	 contain any code, examine its predecessors and try to
	 emit (conditional) return instructions.  */
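
      /* For instance (a hypothetical CFG; the label is invented), if
	 block L contains no active insns and falls through to EXIT,
	 then a predecessor ending in

	     (jump_insn (set (pc) (label_ref L)))

	 can be rewritten as (jump_insn (return)), and a conditional
	 jump to L can likewise have its (label_ref L) arm replaced by
	 (return), as the loop below attempts.  */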

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = last->end;
      while (label && GET_CODE (label) != CODE_LABEL)
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (last->head == label && GET_CODE (label) == CODE_LABEL)
	{
	  rtx epilogue_line_note = NULL_RTX;

	  /* Locate the line number associated with the closing brace,
	     if we can find one.  */
	  for (seq = get_last_insn ();
	       seq && ! active_insn_p (seq);
	       seq = PREV_INSN (seq))
	    if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
	      {
		epilogue_line_note = seq;
		break;
	      }

	  for (e = last->pred; e; e = e_next)
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      e_next = e->pred_next;
	      if (bb == ENTRY_BLOCK_PTR)
		continue;

	      jump = bb->end;
	      if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
		continue;

	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb, epilogue_line_note);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  rtx ret, *loc;

		  ret = SET_SRC (PATTERN (jump));
		  if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
		    loc = &XEXP (ret, 1);
		  else
		    loc = &XEXP (ret, 2);
		  ret = gen_rtx_RETURN (VOIDmode);

		  if (! validate_change (jump, loc, ret, 0))
		    continue;
		  if (JUMP_LABEL (jump))
		    LABEL_NUSES (JUMP_LABEL (jump))--;

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (bb->succ->succ_next == NULL)
		    continue;
		}
	      else
		continue;

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (last->end);
	  emit_return_into_block (last, epilogue_line_note);
	  epilogue_end = last->end;
	  last->succ->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
	 due to RETURN instructions, but those don't need epilogues.
	 There really shouldn't be a mixture -- either all should have
	 been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;

      start_sequence ();
      epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
	 it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
	seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      record_insns (seq, &epilogue);

      seq = gen_sequence ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
#endif
 epilogue_done:

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = bb->end;
      rtx i;
      rtx newinsn;

      if (GET_CODE (insn) != CALL_INSN
	  || ! SIBLING_CALL_P (insn))
	continue;

      start_sequence ();
      seq = gen_sibcall_epilogue ();
      end_sequence ();

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  */
      record_insns (GET_CODE (seq) == SEQUENCE
		    ? seq : newinsn, &sibcall_epilogue);
    }
#endif
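
  /* As an illustrative sketch (the exact insns come from the target's
     sibcall_epilogue pattern), a tail call

	 (call_insn ...)	; with SIBLING_CALL_P set

     is now preceded by that pattern's insns, typically register
     restores, which the record_insns call above protects from being
     deleted by life analysis.  */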

#ifdef HAVE_prologue
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note after the prologue.  Which means (1) that if
	 there are line number notes before where we inserted the
	 prologue we should move them, and (2) we should generate a
	 note before the end of the first basic block, if there isn't
	 one already there.

	 ??? This behaviour is completely broken when dealing with
	 multiple entry functions.  We simply always place the note
	 into the first basic block and let alternate entry points
	 be missed.  */
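
      /* Schematically (an invented example), if the stream currently reads

	     (note 10 "foo.c")
	     prologue insns
	     (note NOTE_INSN_PROLOGUE_END)
	     ...

	 the line note is moved after NOTE_INSN_PROLOGUE_END below, so
	 that the first line note GDB sees follows the prologue.  */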

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  */
	      if (prev == NULL)
		break;
	      reorder_insns (insn, insn, prologue_end);
	    }
	}

      /* Find the last line number note in the first block.  */
      for (insn = ENTRY_BLOCK_PTR->next_bb->end;
	   insn != prologue_end && insn;
	   insn = PREV_INSN (insn))
	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	  break;

      /* If we didn't find one, make a copy of the first line number
	 we run across.  */
      if (! insn)
	{
	  for (insn = next_active_insn (prologue_end);
	       insn;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	      {
		emit_line_note_after (NOTE_SOURCE_FILE (insn),
				      NOTE_LINE_NUMBER (insn),
				      prologue_end);
		break;
	      }
	}
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  rtx next;

	  /* Find the prologue-end note if we haven't already, and
	     move it to just after the last prologue insn.  */
	  if (note == 0)
	    {
	      for (note = last; (note = NEXT_INSN (note));)
		if (GET_CODE (note) == NOTE
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		  break;
	    }

	  next = NEXT_INSN (note);

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (GET_CODE (last) == CODE_LABEL)
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if (contains (insn, epilogue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the epilogue-begin note if we haven't already, and
	     move it to just before the first epilogue insn.  */
	  if (note == 0)
	    {
	      for (note = insn; (note = PREV_INSN (note));)
		if (GET_CODE (note) == NOTE
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		  break;
	    }

	  if (PREV_INSN (last) != note)
	    reorder_insns (note, note, PREV_INSN (last));
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  struct var_refs_queue *q;
  struct temp_slot *t;
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
	 i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);
  ggc_mark_rtx (p->x_clobber_return_insn);

  for (t = p->x_temp_slots; t != 0; t = t->next)
    {
      ggc_mark (t);
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);
      ggc_mark_tree (t->type);
    }

  for (q = p->fixup_var_refs_queue; q != 0; q = q->next)
    {
      ggc_mark (q);
      ggc_mark_rtx (q->modified);
    }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);

  mark_hard_reg_initial_vals (p);
}

/* Mark the struct function pointed to by *ARG for GC, if it is not
   NULL.  This is used to mark the current function and the outer
   function chain.  */

static void
maybe_mark_struct_function (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  if (f == 0)
    return;

  ggc_mark_struct_function (f);
}

/* Mark a struct function * for GC.  This is called from ggc-common.c.  */

void
ggc_mark_struct_function (f)
     struct function *f;
{
  ggc_mark (f);
  ggc_mark_tree (f->decl);

  mark_function_status (f);
  mark_eh_status (f->eh);
  mark_stmt_status (f->stmt);
  mark_expr_status (f->expr);
  mark_emit_status (f->emit);
  mark_varasm_status (f->varasm);

  if (mark_machine_status)
    (*mark_machine_status) (f);
  (*lang_hooks.function.mark) (f);

  if (f->original_arg_vector)
    ggc_mark_rtvec ((rtvec) f->original_arg_vector);
  if (f->original_decl_initial)
    ggc_mark_tree (f->original_decl_initial);
  if (f->outer)
    ggc_mark_struct_function (f->outer);
}

/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&cfun, 1, sizeof cfun, maybe_mark_struct_function);
  ggc_add_root (&outer_function_chain, 1, sizeof outer_function_chain,
		maybe_mark_struct_function);

  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}
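
/* The ggc_add_root calls above register `cfun' and `outer_function_chain'
   as garbage-collection roots: on each collection the collector invokes
   maybe_mark_struct_function on the address of each root, so the current
   function, the enclosing function chain, and everything reachable from
   them survive the collection.  */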