/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc;
   in these cases, use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
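
/* Worked example (illustrative, not from the original source): with
   ALIGN == 8, FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16.
   The mask form also behaves for negative values, e.g.
   FLOOR_ROUND (-13, 8) == -16, whereas truncating division would
   round toward zero instead.  */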
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) PARAMS ((void));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
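
/* Illustrative usage sketch (not part of the original source): a caller
   expanding one statement typically brackets its temporaries with a
   nesting level, e.g.

     push_temp_slots ();
     slot = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
     ... emit RTL that stores into and reads from SLOT ...
     free_temp_slots ();
     pop_temp_slots ();

   Since KEEP is 0, free_temp_slots marks SLOT reusable at the end of
   the statement, and combine_temp_slots may later merge it with an
   adjacent free slot.  */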
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
                                         int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
                                        enum machine_mode, enum machine_mode,
                                        int, unsigned int, int, htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
                                             enum machine_mode, htab_t));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
                                    htab_t));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
                                          int, int, rtx));
static void fixup_var_refs_insns_with_hash
  PARAMS ((htab_t, rtx, enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
                                         int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
                                      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
                                             int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
                                          struct args_size *));
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
                               tree));
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int, htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static hashval_t insns_for_mem_hash PARAMS ((const void *));
static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
/* Pointer to chain of `struct function' for containing functions.  */
static GTY(()) struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  (*lang_hooks.function.enter_nested) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  (*lang_hooks.function.leave_nested) (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
                    p->fixup_var_refs_queue->promoted_mode,
                    p->fixup_var_refs_queue->unsignedp,
                    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        fixup_var_refs (queue->modified, queue->promoted_mode,
                        queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  (*lang_hooks.function.final) (f);
  f->stmt = NULL;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->computed_goto_common_label = NULL;
  f->computed_goto_common_reg = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = (*lang_hooks.types.type_for_mode) (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;
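
  /* Numeric example (illustrative, values hypothetical): with
     PREFERRED_STACK_BOUNDARY == 128 and STARTING_FRAME_OFFSET == 4,
     frame_alignment is 16, frame_off is 4 and frame_phase is 12; an
     x_frame_offset congruent to 12 mod 16 then yields an absolute
     offset of 12 + 4 == 16, which is 16-byte aligned.  */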
  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
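
/* Illustrative calls (values hypothetical, not from the original source):

     assign_stack_local (SImode, 4, 0);     alignment taken from SImode
     assign_stack_local (BLKmode, 32, -1);  BIGGEST_ALIGNMENT, size rounded
     assign_stack_local (BLKmode, 32, 64);  explicit 64-bit boundary

   Each call returns a MEM whose address is based on virtual_stack_vars_rtx
   until virtual registers are instantiated, and on frame_pointer_rtx
   afterwards.  */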
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }
  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }
  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }
  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
                                 && TYPE_READONLY (type));
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
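
/* Illustrative call (not from the original source):

     rtx slot = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 1);

   With KEEP == 1 the slot survives the free_temp_slots call at the end
   of the current statement; KEEP == 2 or 3 instead tie its lifetime to
   CLEANUP_POINT_EXPRs or to block scope, as described above.  */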
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that decl should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error_with_decl (decl, "size of variable `%s' is too large");
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
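
/* Illustrative call (not from the original source): for a value of TYPE
   that must be addressable,

     rtx t = assign_temp (type, 0, 1, 0);

   returns a stack MEM; with MEMORY_REQUIRED == 0 and a scalar TYPE it may
   instead return a fresh pseudo register, possibly in a promoted mode.  */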
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are in use.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
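
/* Worked example (illustrative, values hypothetical): two free BLKmode
   slots P and Q with P->base_offset == 0, P->full_size == 16,
   Q->base_offset == 16 and Q->full_size == 8 satisfy
   P->base_offset + P->full_size == Q->base_offset, so Q is merged into
   P, leaving a single free 24-byte slot.  */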
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
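
/* Example (illustrative): if OLD is (plus virtual_stack_vars_rtx
   (const_int 16)) and NEW is a REG holding that sum, the slot covering
   offset 16 gains NEW as an alias, so a later
   find_temp_slot_from_address (NEW) locates the same slot.  */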
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X matched
   one of our slots, just mark that one.  Otherwise, we can't easily
   predict which it is, so upgrade all of them.  Kept slots need not
   be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
               || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
         ? SAVE_EXPR_RTL (decl)
         : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);
  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
                            decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fixup references to the parts only after we fixup references
         to the whole CONCAT, lest we do double fixups for the latter
         references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
         already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;
}
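
/* Illustrative trigger (not from the original source): in

     int f (void)
     {
       int x = 0;
       int *p = &x;
       return *p;
     }

   taking &x after RTL for X has been generated as a REG causes
   put_var_into_stack to be called for X; depending on optimization it
   either wraps the REG in an ADDRESSOF or moves it into a stack MEM
   and fixes up prior references.  */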
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     htab_t ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     htab_t ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     htab_t ht;
     rtx may_share;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
                                      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0, may_share);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                may_share);
          end_sequence ();
        }
    }
}
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries
   and X is some part of an insn.  Return a struct fixup_replacement whose
   OLD value is equal to X.  Allocate a new structure if no such entry
   exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx may_share;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the Normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                        may_share);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }
      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
                             may_share);

      insn = next;
    }
}
/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
     htab_t ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
{
  struct insns_for_mem_entry tmp;
  struct insns_for_mem_entry *ime;
  rtx insn_list;

  tmp.key = var;
  ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
                           unsignedp, 1, may_share);
}
/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */

static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx no_share;
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
          || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
              && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
                  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
        /* The REG_LIBCALL note will go away since we are going to
           turn INSN into a NOTE, so just delete the
           corresponding REG_RETVAL note.  */
        remove_note (XEXP (note, 0),
                     find_reg_note (XEXP (note, 0), REG_RETVAL,
                                    NULL_RTX));

      delete_insn (insn);
    }

  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
           && (set = single_set (insn)) != 0
           && SET_DEST (set) == var
           /* If this represents the result of an insn group,
              don't delete the insn.  */
           && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
           && (rtx_equal_p (SET_SRC (set), var)
               || (GET_CODE (SET_SRC (set)) == REG
                   && (prev = prev_nonnote_insn (insn)) != 0
                   && (prev_set = single_set (prev)) != 0
                   && SET_DEST (prev_set) == SET_SRC (set)
                   && rtx_equal_p (SET_SRC (prev_set), var))))
    {
      delete_insn (insn);
    }
  else
    {
      struct fixup_replacement *replacements = 0;
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
        {
          /* If the insn that copies the results of a CALL_INSN
             into a pseudo now references VAR, we have to use an
             intermediate pseudo since we want the life of the
             return value register to be only a single insn.

             If we don't use an intermediate pseudo, such things as
             address computations to make the address of VAR valid
             if it is not can be placed between the CALL_INSN and INSN.

             To make sure this doesn't happen, we record the destination
             of the CALL_INSN and see if the next insn uses both that
             and VAR.  */

          if (call_dest != 0 && GET_CODE (insn) == INSN
              && reg_mentioned_p (var, PATTERN (insn))
              && reg_mentioned_p (call_dest, PATTERN (insn)))
            {
              rtx temp = gen_reg_rtx (GET_MODE (call_dest));

              emit_insn_before (gen_move_insn (temp, call_dest), insn);

              PATTERN (insn) = replace_rtx (PATTERN (insn),
                                            call_dest, temp);
            }

          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == SET)
            call_dest = SET_DEST (PATTERN (insn));
          else if (GET_CODE (insn) == CALL_INSN
                   && GET_CODE (PATTERN (insn)) == PARALLEL
                   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
            call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
          else
            call_dest = 0;
        }

      /* See if we have to do anything to INSN now that VAR is in
         memory.  If it needs to be loaded into a pseudo, use a single
         pseudo for the entire insn in case there is a MATCH_DUP
         between two operands.  We pass a pointer to the head of
         a list of struct fixup_replacements.  If fixup_var_refs_1
         needs to allocate pseudos or replacement MEMs (for SUBREGs),
         it will record them in this list.

         If it allocated a pseudo for any replacement, we copy into
         it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                        &replacements, no_share);

      /* If this is last_parm_insn, and any instructions were output
         after it to fix it up, then we must set last_parm_insn to
         the last such instruction emitted.  */
      if (insn == last_parm_insn)
        last_parm_insn = PREV_INSN (next_insn);
1802 while (replacements)
1804 struct fixup_replacement *next;
1806 if (GET_CODE (replacements->new) == REG)
1808 rtx insert_before;
1809 rtx seq;
1811 /* OLD might be a (subreg (mem)). */
1812 if (GET_CODE (replacements->old) == SUBREG)
1813 replacements->old
1814 = fixup_memory_subreg (replacements->old, insn,
1815 promoted_mode, 0);
1816 else
1817 replacements->old
1818 = fixup_stack_1 (replacements->old, insn);
1820 insert_before = insn;
1822 /* If we are changing the mode, do a conversion.
1823 This might be wasteful, but combine.c will
1824 eliminate much of the waste. */
1826 if (GET_MODE (replacements->new)
1827 != GET_MODE (replacements->old))
1829 start_sequence ();
1830 convert_move (replacements->new,
1831 replacements->old, unsignedp);
1832 seq = get_insns ();
1833 end_sequence ();
1835 else
1836 seq = gen_move_insn (replacements->new,
1837 replacements->old);
1839 emit_insn_before (seq, insert_before);
1842 next = replacements->next;
1843 free (replacements);
1844 replacements = next;
1848 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1849 But don't touch other insns referred to by reg-notes;
1850 we will get them elsewhere. */
1851 while (note)
1853 if (GET_CODE (note) != INSN_LIST)
1854 XEXP (note, 0)
1855 = walk_fixup_memory_subreg (XEXP (note, 0), insn,
1856 promoted_mode, 1);
1857 note = XEXP (note, 1);
1861 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1862 See if the rtx expression at *LOC in INSN needs to be changed.
1864 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1865 contain a list of original rtx's and replacements. If we find that we need
1866 to modify this insn by replacing a memory reference with a pseudo or by
1867 making a new MEM to implement a SUBREG, we consult that list to see if
1868 we have already chosen a replacement. If none has already been allocated,
1869 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1870 or the SUBREG, as appropriate, to the pseudo. */
1872 static void
1873 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1874 rtx var;
1875 enum machine_mode promoted_mode;
1876 rtx *loc;
1877 rtx insn;
1878 struct fixup_replacement **replacements;
1879 rtx no_share;
1881 int i;
1882 rtx x = *loc;
1883 RTX_CODE code = GET_CODE (x);
1884 const char *fmt;
1885 rtx tem, tem1;
1886 struct fixup_replacement *replacement;
1888 switch (code)
1890 case ADDRESSOF:
1891 if (XEXP (x, 0) == var)
1893 /* Prevent sharing of rtl that might lose. */
1894 rtx sub = copy_rtx (XEXP (var, 0));
1896 if (! validate_change (insn, loc, sub, 0))
1898 rtx y = gen_reg_rtx (GET_MODE (sub));
1899 rtx seq, new_insn;
1901 /* We should be able to replace with a register or all is lost.
1902 Note that we can't use validate_change to verify this, since
1903 we don't care about replacing all dups simultaneously. */
1904 if (! validate_replace_rtx (*loc, y, insn))
1905 abort ();
1907 /* Careful! First try to recognize a direct move of the
1908 value, mimicking how things are done in gen_reload wrt
1909 PLUS. Consider what happens when insn is a conditional
1910 move instruction and addsi3 clobbers flags. */
1912 start_sequence ();
1913 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1914 seq = get_insns ();
1915 end_sequence ();
1917 if (recog_memoized (new_insn) < 0)
1919 /* That failed. Fall back on force_operand and hope. */
1921 start_sequence ();
1922 sub = force_operand (sub, y);
1923 if (sub != y)
1924 emit_insn (gen_move_insn (y, sub));
1925 seq = get_insns ();
1926 end_sequence ();
1929 #ifdef HAVE_cc0
1930 /* Don't separate setter from user. */
1931 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1932 insn = PREV_INSN (insn);
1933 #endif
1935 emit_insn_before (seq, insn);
1938 return;
1940 case MEM:
1941 if (var == x)
1943 /* If we already have a replacement, use it. Otherwise,
1944 try to fix up this address in case it is invalid. */
1946 replacement = find_fixup_replacement (replacements, var);
1947 if (replacement->new)
1949 *loc = replacement->new;
1950 return;
1953 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1955 /* Unless we are forcing memory to register or we changed the mode,
1956 we can leave things the way they are if the insn is valid. */
1958 INSN_CODE (insn) = -1;
1959 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1960 && recog_memoized (insn) >= 0)
1961 return;
1963 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1964 return;
1967 /* If X contains VAR, we need to unshare it here so that we update
1968 each occurrence separately. But all identical MEMs in one insn
1969 must be replaced with the same rtx because of the possibility of
1970 MATCH_DUPs. */
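/* Concretely: if the pattern contains two rtx-equal MEMs mentioning
   VAR, both occurrences are redirected to the same fresh copy (so any
   MATCH_DUP in the insn pattern stays satisfied), while rtl shared
   with other insns is unshared before we edit it.  */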
1972 if (reg_mentioned_p (var, x))
1974 replacement = find_fixup_replacement (replacements, x);
1975 if (replacement->new == 0)
1976 replacement->new = copy_most_rtx (x, no_share);
1978 *loc = x = replacement->new;
1979 code = GET_CODE (x);
1981 break;
1983 case REG:
1984 case CC0:
1985 case PC:
1986 case CONST_INT:
1987 case CONST:
1988 case SYMBOL_REF:
1989 case LABEL_REF:
1990 case CONST_DOUBLE:
1991 case CONST_VECTOR:
1992 return;
1994 case SIGN_EXTRACT:
1995 case ZERO_EXTRACT:
1996 /* Note that in some cases those types of expressions are altered
1997 by optimize_bit_field, and do not survive to get here. */
1998 if (XEXP (x, 0) == var
1999 || (GET_CODE (XEXP (x, 0)) == SUBREG
2000 && SUBREG_REG (XEXP (x, 0)) == var))
2002 /* Get TEM as a valid MEM in the mode presently in the insn.
2004 We don't worry about the possibility of MATCH_DUP here; it
2005 is highly unlikely and would be tricky to handle. */
2007 tem = XEXP (x, 0);
2008 if (GET_CODE (tem) == SUBREG)
2010 if (GET_MODE_BITSIZE (GET_MODE (tem))
2011 > GET_MODE_BITSIZE (GET_MODE (var)))
2013 replacement = find_fixup_replacement (replacements, var);
2014 if (replacement->new == 0)
2015 replacement->new = gen_reg_rtx (GET_MODE (var));
2016 SUBREG_REG (tem) = replacement->new;
2018 /* The following code works only if we have a MEM, so we
2019 need to handle the subreg here. We directly substitute
2020 it assuming that a subreg must be OK here. We already
2021 scheduled a replacement to copy the mem into the
2022 subreg. */
2023 XEXP (x, 0) = tem;
2024 return;
2026 else
2027 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2029 else
2030 tem = fixup_stack_1 (tem, insn);
2032 /* Unless we want to load from memory, get TEM into the proper mode
2033 for an extract from memory. This can only be done if the
2034 extract is at a constant position and length. */
2036 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2037 && GET_CODE (XEXP (x, 2)) == CONST_INT
2038 && ! mode_dependent_address_p (XEXP (tem, 0))
2039 && ! MEM_VOLATILE_P (tem))
2041 enum machine_mode wanted_mode = VOIDmode;
2042 enum machine_mode is_mode = GET_MODE (tem);
2043 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2045 if (GET_CODE (x) == ZERO_EXTRACT)
2047 enum machine_mode new_mode
2048 = mode_for_extraction (EP_extzv, 1);
2049 if (new_mode != MAX_MACHINE_MODE)
2050 wanted_mode = new_mode;
2052 else if (GET_CODE (x) == SIGN_EXTRACT)
2054 enum machine_mode new_mode
2055 = mode_for_extraction (EP_extv, 1);
2056 if (new_mode != MAX_MACHINE_MODE)
2057 wanted_mode = new_mode;
2060 /* If we have a narrower mode, we can do something. */
2061 if (wanted_mode != VOIDmode
2062 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2064 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2065 rtx old_pos = XEXP (x, 2);
2066 rtx newmem;
2068 /* If the bytes and bits are counted differently, we
2069 must adjust the offset. */
2070 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2071 offset = (GET_MODE_SIZE (is_mode)
2072 - GET_MODE_SIZE (wanted_mode) - offset);
2074 pos %= GET_MODE_BITSIZE (wanted_mode);
2076 newmem = adjust_address_nv (tem, wanted_mode, offset);
2078 /* Make the change and see if the insn remains valid. */
2079 INSN_CODE (insn) = -1;
2080 XEXP (x, 0) = newmem;
2081 XEXP (x, 2) = GEN_INT (pos);
2083 if (recog_memoized (insn) >= 0)
2084 return;
2086 /* Otherwise, restore old position. XEXP (x, 0) will be
2087 restored later. */
2088 XEXP (x, 2) = old_pos;
2092 /* If we get here, the bitfield extract insn can't accept a memory
2093 reference. Copy the input into a register. */
2095 tem1 = gen_reg_rtx (GET_MODE (tem));
2096 emit_insn_before (gen_move_insn (tem1, tem), insn);
2097 XEXP (x, 0) = tem1;
2098 return;
2100 break;
2102 case SUBREG:
2103 if (SUBREG_REG (x) == var)
2105 /* If this is a special SUBREG made because VAR was promoted
2106 from a wider mode, replace it with VAR and call ourselves
2107 recursively, this time saying that the object previously
2108 had its current mode (by virtue of the SUBREG). */
2110 if (SUBREG_PROMOTED_VAR_P (x))
2112 *loc = var;
2113 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2114 no_share);
2115 return;
2118 /* If this SUBREG makes VAR wider, it has become a paradoxical
2119 SUBREG with VAR in memory, but these aren't allowed at this
2120 stage of the compilation. So load VAR into a pseudo and take
2121 a SUBREG of that pseudo. */
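/* Illustrative example (modes invented): (subreg:DI (mem:SI VAR) 0)
   becomes (subreg:DI (reg T) 0), where T is a fresh pseudo in
   PROMOTED_MODE; fixup_var_refs_insn later emits the copy of VAR
   into T.  */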
2122 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2124 replacement = find_fixup_replacement (replacements, var);
2125 if (replacement->new == 0)
2126 replacement->new = gen_reg_rtx (promoted_mode);
2127 SUBREG_REG (x) = replacement->new;
2128 return;
2131 /* See if we have already found a replacement for this SUBREG.
2132 If so, use it. Otherwise, make a MEM and see if the insn
2133 is recognized. If not, or if we should force MEM into a register,
2134 make a pseudo for this SUBREG. */
2135 replacement = find_fixup_replacement (replacements, x);
2136 if (replacement->new)
2138 *loc = replacement->new;
2139 return;
2142 replacement->new = *loc = fixup_memory_subreg (x, insn,
2143 promoted_mode, 0);
2145 INSN_CODE (insn) = -1;
2146 if (! flag_force_mem && recog_memoized (insn) >= 0)
2147 return;
2149 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2150 return;
2152 break;
2154 case SET:
2155 /* First do special simplification of bit-field references. */
2156 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2157 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2158 optimize_bit_field (x, insn, 0);
2159 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2160 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2161 optimize_bit_field (x, insn, 0);
2163 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2164 into a register and then store it back out. */
2165 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2166 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2167 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2168 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2169 > GET_MODE_SIZE (GET_MODE (var))))
2171 replacement = find_fixup_replacement (replacements, var);
2172 if (replacement->new == 0)
2173 replacement->new = gen_reg_rtx (GET_MODE (var));
2175 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2176 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2179 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2180 insn into a pseudo and store the low part of the pseudo into VAR. */
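/* Illustrative transformation (modes invented): the paradoxical
     (set (subreg:DI (mem:SI VAR) 0) SRC)
   becomes (set (reg:DI T) SRC), followed by an insn copying the
   low SImode part of T back into VAR.  */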
2181 if (GET_CODE (SET_DEST (x)) == SUBREG
2182 && SUBREG_REG (SET_DEST (x)) == var
2183 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2184 > GET_MODE_SIZE (GET_MODE (var))))
2186 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2187 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2188 tem)),
2189 insn);
2190 break;
2194 rtx dest = SET_DEST (x);
2195 rtx src = SET_SRC (x);
2196 rtx outerdest = dest;
2198 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2199 || GET_CODE (dest) == SIGN_EXTRACT
2200 || GET_CODE (dest) == ZERO_EXTRACT)
2201 dest = XEXP (dest, 0);
2203 if (GET_CODE (src) == SUBREG)
2204 src = SUBREG_REG (src);
2206 /* If VAR does not appear at the top level of the SET
2207 just scan the lower levels of the tree. */
2209 if (src != var && dest != var)
2210 break;
2212 /* We will need to rerecognize this insn. */
2213 INSN_CODE (insn) = -1;
2215 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2216 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2218 /* Since this case will return, ensure we fixup all the
2219 operands here. */
2220 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2221 insn, replacements, no_share);
2222 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2223 insn, replacements, no_share);
2224 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2225 insn, replacements, no_share);
2227 tem = XEXP (outerdest, 0);
2229 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2230 that may appear inside a ZERO_EXTRACT.
2231 This was legitimate when the MEM was a REG. */
2232 if (GET_CODE (tem) == SUBREG
2233 && SUBREG_REG (tem) == var)
2234 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2235 else
2236 tem = fixup_stack_1 (tem, insn);
2238 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2239 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2240 && ! mode_dependent_address_p (XEXP (tem, 0))
2241 && ! MEM_VOLATILE_P (tem))
2243 enum machine_mode wanted_mode;
2244 enum machine_mode is_mode = GET_MODE (tem);
2245 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2247 wanted_mode = mode_for_extraction (EP_insv, 0);
2249 /* If we have a narrower mode, we can do something. */
2250 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2252 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2253 rtx old_pos = XEXP (outerdest, 2);
2254 rtx newmem;
2256 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2257 offset = (GET_MODE_SIZE (is_mode)
2258 - GET_MODE_SIZE (wanted_mode) - offset);
2260 pos %= GET_MODE_BITSIZE (wanted_mode);
2262 newmem = adjust_address_nv (tem, wanted_mode, offset);
2264 /* Make the change and see if the insn remains valid. */
2265 INSN_CODE (insn) = -1;
2266 XEXP (outerdest, 0) = newmem;
2267 XEXP (outerdest, 2) = GEN_INT (pos);
2269 if (recog_memoized (insn) >= 0)
2270 return;
2272 /* Otherwise, restore old position. XEXP (x, 0) will be
2273 restored later. */
2274 XEXP (outerdest, 2) = old_pos;
2278 /* If we get here, the bit-field store doesn't allow memory
2279 or isn't located at a constant position. Load the value into
2280 a register, do the store, and put it back into memory. */
2282 tem1 = gen_reg_rtx (GET_MODE (tem));
2283 emit_insn_before (gen_move_insn (tem1, tem), insn);
2284 emit_insn_after (gen_move_insn (tem, tem1), insn);
2285 XEXP (outerdest, 0) = tem1;
2286 return;
2289 /* STRICT_LOW_PART is a no-op on memory references
2290 and it can cause combinations to be unrecognizable,
2291 so eliminate it. */
2293 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2294 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2296 /* A valid insn to copy VAR into or out of a register
2297 must be left alone, to avoid an infinite loop here.
2298 If the reference to VAR is by a subreg, fix that up,
2299 since SUBREG is not valid for a memref.
2300 Also fix up the address of the stack slot.
2302 Note that we must not try to recognize the insn until
2303 after we know that we have valid addresses and no
2304 (subreg (mem ...) ...) constructs, since these interfere
2305 with determining the validity of the insn. */
2307 if ((SET_SRC (x) == var
2308 || (GET_CODE (SET_SRC (x)) == SUBREG
2309 && SUBREG_REG (SET_SRC (x)) == var))
2310 && (GET_CODE (SET_DEST (x)) == REG
2311 || (GET_CODE (SET_DEST (x)) == SUBREG
2312 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2313 && GET_MODE (var) == promoted_mode
2314 && x == single_set (insn))
2316 rtx pat, last;
2318 if (GET_CODE (SET_SRC (x)) == SUBREG
2319 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2320 > GET_MODE_SIZE (GET_MODE (var))))
2322 /* This (subreg VAR) is now a paradoxical subreg. We need
2323 to replace VAR instead of the subreg. */
2324 replacement = find_fixup_replacement (replacements, var);
2325 if (replacement->new == NULL_RTX)
2326 replacement->new = gen_reg_rtx (GET_MODE (var));
2327 SUBREG_REG (SET_SRC (x)) = replacement->new;
2329 else
2331 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2332 if (replacement->new)
2333 SET_SRC (x) = replacement->new;
2334 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2335 SET_SRC (x) = replacement->new
2336 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode, 0);
2338 else
2339 SET_SRC (x) = replacement->new
2340 = fixup_stack_1 (SET_SRC (x), insn);
2343 if (recog_memoized (insn) >= 0)
2344 return;
2346 /* INSN is not valid, but we know that we want to
2347 copy SET_SRC (x) to SET_DEST (x) in some way. So
2348 we generate the move and see whether it requires more
2349 than one insn. If it does, we emit those insns and
2350 delete INSN. Otherwise, we can just replace the pattern
2351 of INSN; we have already verified above that INSN has
2352 no other function than to do X. */
2354 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2355 if (NEXT_INSN (pat) != NULL_RTX)
2357 last = emit_insn_before (pat, insn);
2359 /* INSN might have REG_RETVAL or other important notes, so
2360 we need to store the pattern of the last insn in the
2361 sequence into INSN similarly to the normal case. LAST
2362 should not have REG_NOTES, but we allow them if INSN has
2363 no REG_NOTES. */
2364 if (REG_NOTES (last) && REG_NOTES (insn))
2365 abort ();
2366 if (REG_NOTES (last))
2367 REG_NOTES (insn) = REG_NOTES (last);
2368 PATTERN (insn) = PATTERN (last);
2370 delete_insn (last);
2372 else
2373 PATTERN (insn) = PATTERN (pat);
2375 return;
2378 if ((SET_DEST (x) == var
2379 || (GET_CODE (SET_DEST (x)) == SUBREG
2380 && SUBREG_REG (SET_DEST (x)) == var))
2381 && (GET_CODE (SET_SRC (x)) == REG
2382 || (GET_CODE (SET_SRC (x)) == SUBREG
2383 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2384 && GET_MODE (var) == promoted_mode
2385 && x == single_set (insn))
2387 rtx pat, last;
2389 if (GET_CODE (SET_DEST (x)) == SUBREG)
2390 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2391 promoted_mode, 0);
2392 else
2393 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2395 if (recog_memoized (insn) >= 0)
2396 return;
2398 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2399 if (NEXT_INSN (pat) != NULL_RTX)
2401 last = emit_insn_before (pat, insn);
2403 /* INSN might have REG_RETVAL or other important notes, so
2404 we need to store the pattern of the last insn in the
2405 sequence into INSN similarly to the normal case. LAST
2406 should not have REG_NOTES, but we allow them if INSN has
2407 no REG_NOTES. */
2408 if (REG_NOTES (last) && REG_NOTES (insn))
2409 abort ();
2410 if (REG_NOTES (last))
2411 REG_NOTES (insn) = REG_NOTES (last);
2412 PATTERN (insn) = PATTERN (last);
2414 delete_insn (last);
2416 else
2417 PATTERN (insn) = PATTERN (pat);
2419 return;
2422 /* Otherwise, storing into VAR must be handled specially
2423 by storing into a temporary and copying that into VAR
2424 with a new insn after this one. Note that this case
2425 will be used when storing into a promoted scalar since
2426 the insn will now have different modes on the input
2427 and output and hence will be invalid (except for the case
2428 of setting it to a constant, which does not need any
2429 change if it is valid). We generate extra code in that case,
2430 but combine.c will eliminate it. */
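/* Sketch of the rewrite (modes invented): for a variable promoted to
   SImode but declared HImode, (set (mem:HI VAR) (plus:SI ...)) is
   invalid once VAR lives in memory; it becomes
   (set (reg:SI T) (plus:SI ...)) followed by a move of the low
   HImode part of T into VAR.  */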
2432 if (dest == var)
2434 rtx temp;
2435 rtx fixeddest = SET_DEST (x);
2436 enum machine_mode temp_mode;
2438 /* A STRICT_LOW_PART around a MEM can be discarded. */
2439 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2440 fixeddest = XEXP (fixeddest, 0);
2441 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2442 if (GET_CODE (fixeddest) == SUBREG)
2444 fixeddest = fixup_memory_subreg (fixeddest, insn,
2445 promoted_mode, 0);
2446 temp_mode = GET_MODE (fixeddest);
2448 else
2450 fixeddest = fixup_stack_1 (fixeddest, insn);
2451 temp_mode = promoted_mode;
2454 temp = gen_reg_rtx (temp_mode);
2456 emit_insn_after (gen_move_insn (fixeddest,
2457 gen_lowpart (GET_MODE (fixeddest),
2458 temp)),
2459 insn);
2461 SET_DEST (x) = temp;
2465 default:
2466 break;
2469 /* Nothing special about this RTX; fix its operands. */
2471 fmt = GET_RTX_FORMAT (code);
2472 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2474 if (fmt[i] == 'e')
2475 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2476 no_share);
2477 else if (fmt[i] == 'E')
2479 int j;
2480 for (j = 0; j < XVECLEN (x, i); j++)
2481 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2482 insn, replacements, no_share);
2487 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2488 The REG was placed on the stack, so X now has the form (SUBREG:m1
2489 (MEM:m2 ...)).
2491 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2492 must be emitted to compute NEWADDR, put them before INSN.
2494 UNCRITICAL nonzero means accept paradoxical subregs.
2495 This is used for subregs found inside REG_NOTES. */
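/* For instance, on a little-endian target (offsets invented),
   (subreg:SI (mem:DI (reg A)) 4) is rewritten as
   (mem:SI (plus (reg A) (const_int 4))), with any insns needed to
   compute the new address emitted before INSN.  */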
2497 static rtx
2498 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2499 rtx x;
2500 rtx insn;
2501 enum machine_mode promoted_mode;
2502 int uncritical;
2504 int offset;
2505 rtx mem = SUBREG_REG (x);
2506 rtx addr = XEXP (mem, 0);
2507 enum machine_mode mode = GET_MODE (x);
2508 rtx result, seq;
2510 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2511 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2512 abort ();
2514 offset = SUBREG_BYTE (x);
2515 if (BYTES_BIG_ENDIAN)
2516 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2517 the offset so that it points to the right location within the
2518 MEM. */
2519 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2521 if (!flag_force_addr
2522 && memory_address_p (mode, plus_constant (addr, offset)))
2523 /* Shortcut if no insns need be emitted. */
2524 return adjust_address (mem, mode, offset);
2526 start_sequence ();
2527 result = adjust_address (mem, mode, offset);
2528 seq = get_insns ();
2529 end_sequence ();
2531 emit_insn_before (seq, insn);
2532 return result;
2535 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2536 Replace subexpressions of X in place.
2537 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2538 Otherwise return X, with its contents possibly altered.
2540 INSN, PROMOTED_MODE and UNCRITICAL are as for
2541 fixup_memory_subreg. */
2543 static rtx
2544 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2545 rtx x;
2546 rtx insn;
2547 enum machine_mode promoted_mode;
2548 int uncritical;
2550 enum rtx_code code;
2551 const char *fmt;
2552 int i;
2554 if (x == 0)
2555 return 0;
2557 code = GET_CODE (x);
2559 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2560 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2562 /* Nothing special about this RTX; fix its operands. */
2564 fmt = GET_RTX_FORMAT (code);
2565 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2567 if (fmt[i] == 'e')
2568 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2569 promoted_mode, uncritical);
2570 else if (fmt[i] == 'E')
2572 int j;
2573 for (j = 0; j < XVECLEN (x, i); j++)
2574 XVECEXP (x, i, j)
2575 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2576 promoted_mode, uncritical);
2579 return x;
2582 /* For each memory ref within X, if it refers to a stack slot
2583 with an out of range displacement, put the address in a temp register
2584 (emitting new insns before INSN to load these registers)
2585 and alter the memory ref to use that register.
2586 Replace each such MEM rtx with a copy, to avoid clobberage. */
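/* E.g. if (mem:SI (plus (reg fp) (const_int 100000))) is not a valid
   address on the target (the displacement is invented), we emit
   (set (reg T) (plus (reg fp) (const_int 100000))) before INSN and
   use (mem:SI (reg T)) in its place.  */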
2588 static rtx
2589 fixup_stack_1 (x, insn)
2590 rtx x;
2591 rtx insn;
2593 int i;
2594 RTX_CODE code = GET_CODE (x);
2595 const char *fmt;
2597 if (code == MEM)
2599 rtx ad = XEXP (x, 0);
2600 /* If we have address of a stack slot but it's not valid
2601 (displacement is too large), compute the sum in a register. */
2602 if (GET_CODE (ad) == PLUS
2603 && GET_CODE (XEXP (ad, 0)) == REG
2604 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2605 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2606 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2607 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2608 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2609 #endif
2610 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2611 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2612 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2613 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2615 rtx temp, seq;
2616 if (memory_address_p (GET_MODE (x), ad))
2617 return x;
2619 start_sequence ();
2620 temp = copy_to_reg (ad);
2621 seq = get_insns ();
2622 end_sequence ();
2623 emit_insn_before (seq, insn);
2624 return replace_equiv_address (x, temp);
2626 return x;
2629 fmt = GET_RTX_FORMAT (code);
2630 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2632 if (fmt[i] == 'e')
2633 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2634 else if (fmt[i] == 'E')
2636 int j;
2637 for (j = 0; j < XVECLEN (x, i); j++)
2638 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2641 return x;
2644 /* Optimization: a bit-field instruction whose field
2645 happens to be a byte or halfword in memory
2646 can be changed to a move instruction.
2648 We call here when INSN is an insn to examine or store into a bit-field.
2649 BODY is the SET-rtx to be altered.
2651 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2652 (Currently this is called only from function.c, and EQUIV_MEM
2653 is always 0.) */
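/* Illustrative case (positions invented), on a little-endian target:
     (set (zero_extract:SI (mem:SI X) (const_int 8) (const_int 8)) Y)
   stores an aligned byte, so it can become a plain move of the low
   QImode part of Y into (mem:QI (plus X (const_int 1))).  */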
2655 static void
2656 optimize_bit_field (body, insn, equiv_mem)
2657 rtx body;
2658 rtx insn;
2659 rtx *equiv_mem;
2661 rtx bitfield;
2662 int destflag;
2663 rtx seq = 0;
2664 enum machine_mode mode;
2666 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2667 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2668 bitfield = SET_DEST (body), destflag = 1;
2669 else
2670 bitfield = SET_SRC (body), destflag = 0;
2672 /* First check that the field being stored has constant size and position
2673 and is in fact a byte or halfword suitably aligned. */
2675 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2676 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2677 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2678 != BLKmode)
2679 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2681 rtx memref = 0;
2683 /* Now check that the containing word is memory, not a register,
2684 and that it is safe to change the machine mode. */
2686 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2687 memref = XEXP (bitfield, 0);
2688 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2689 && equiv_mem != 0)
2690 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2691 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2692 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2693 memref = SUBREG_REG (XEXP (bitfield, 0));
2694 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2695 && equiv_mem != 0
2696 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2697 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2699 if (memref
2700 && ! mode_dependent_address_p (XEXP (memref, 0))
2701 && ! MEM_VOLATILE_P (memref))
2703 /* Now adjust the address, first for any subreg'ing
2704 that we are now getting rid of,
2705 and then for which byte of the word is wanted. */
2707 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2708 rtx insns;
2710 /* Adjust OFFSET to count bits from low-address byte. */
2711 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2712 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2713 - offset - INTVAL (XEXP (bitfield, 1)));
2715 /* Adjust OFFSET to count bytes from low-address byte. */
2716 offset /= BITS_PER_UNIT;
2717 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2719 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2720 / UNITS_PER_WORD) * UNITS_PER_WORD;
2721 if (BYTES_BIG_ENDIAN)
2722 offset -= (MIN (UNITS_PER_WORD,
2723 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2724 - MIN (UNITS_PER_WORD,
2725 GET_MODE_SIZE (GET_MODE (memref))));
2728 start_sequence ();
2729 memref = adjust_address (memref, mode, offset);
2730 insns = get_insns ();
2731 end_sequence ();
2732 emit_insn_before (insns, insn);
2734 /* Store this memory reference where
2735 we found the bit field reference. */
2737 if (destflag)
2739 validate_change (insn, &SET_DEST (body), memref, 1);
2740 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2742 rtx src = SET_SRC (body);
2743 while (GET_CODE (src) == SUBREG
2744 && SUBREG_BYTE (src) == 0)
2745 src = SUBREG_REG (src);
2746 if (GET_MODE (src) != GET_MODE (memref))
2747 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2748 validate_change (insn, &SET_SRC (body), src, 1);
2750 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2751 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2752 /* This shouldn't happen because anything that didn't have
2753 one of these modes should have got converted explicitly
2754 and then referenced through a subreg.
2755 This is so because the original bit-field was
2756 handled by agg_mode and so its tree structure had
2757 the same mode that memref now has. */
2758 abort ();
2760 else
2762 rtx dest = SET_DEST (body);
2764 while (GET_CODE (dest) == SUBREG
2765 && SUBREG_BYTE (dest) == 0
2766 && (GET_MODE_CLASS (GET_MODE (dest))
2767 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2768 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2769 <= UNITS_PER_WORD))
2770 dest = SUBREG_REG (dest);
2772 validate_change (insn, &SET_DEST (body), dest, 1);
2774 if (GET_MODE (dest) == GET_MODE (memref))
2775 validate_change (insn, &SET_SRC (body), memref, 1);
2776 else
2778 /* Convert the mem ref to the destination mode. */
2779 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2781 start_sequence ();
2782 convert_move (newreg, memref,
2783 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2784 seq = get_insns ();
2785 end_sequence ();
2787 validate_change (insn, &SET_SRC (body), newreg, 1);
2791 /* See if we can convert this extraction or insertion into
2792 a simple move insn. We might not be able to do so if this
2793 was, for example, part of a PARALLEL.
2795 If we succeed, write out any needed conversions. If we fail,
2796 it is hard to guess why we failed, so don't do anything
2797 special; just let the optimization be suppressed. */
2799 if (apply_change_group () && seq)
2800 emit_insn_before (seq, insn);
2805 /* These routines are responsible for converting virtual register references
2806 to the actual hard register references once RTL generation is complete.
2808 The following five variables are used for communication between the
2809 routines. They contain the offsets of the virtual registers from their
2810 respective hard registers. */
2812 static int in_arg_offset;
2813 static int var_offset;
2814 static int dynamic_offset;
2815 static int out_arg_offset;
2816 static int cfa_offset;
2818 /* In most machines, the stack pointer register is equivalent to the bottom
2819 of the stack. */
2821 #ifndef STACK_POINTER_OFFSET
2822 #define STACK_POINTER_OFFSET 0
2823 #endif
2825 /* If not defined, pick an appropriate default for the offset of dynamically
2826 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2827 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2829 #ifndef STACK_DYNAMIC_OFFSET
2831 /* The bottom of the stack points to the actual arguments. If
2832 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2833 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2834 stack space for register parameters is not pushed by the caller, but
2835 rather part of the fixed stack areas and hence not included in
2836 `current_function_outgoing_args_size'. Nevertheless, we must allow
2837 for it when allocating stack dynamic objects. */
2839 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2840 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2841 ((ACCUMULATE_OUTGOING_ARGS \
2842 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2843 + (STACK_POINTER_OFFSET))
2845 #else
2846 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2847 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2848 + (STACK_POINTER_OFFSET))
2849 #endif
2850 #endif
2852 /* On most machines, the CFA coincides with the first incoming parm. */
2854 #ifndef ARG_POINTER_CFA_OFFSET
2855 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2856 #endif
2858 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had its
2859 address taken. DECL is the decl or SAVE_EXPR for the object stored in the
2860 register, for later use if we do need to force REG into the stack. REG is
2861 overwritten by the MEM like in put_reg_into_stack. */
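/* Sketch (register numbers invented): for a pseudo (reg:SI 117) whose
   address is taken, REG is rewritten in place into
     (mem:SI (addressof:Pmode (reg:SI 118) 117 DECL))
   so every existing reference sees the MEM, while the fresh inner REG
   is the one forced into a real stack slot if the ADDRESSOF cannot
   later be purged.  */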
2863 rtx
2864 gen_mem_addressof (reg, decl)
2865 rtx reg;
2866 tree decl;
2868 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2869 REGNO (reg), decl);
2871 /* Calculate this before we start messing with decl's RTL. */
2872 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2874 /* If the original REG was a user-variable, then so is the REG whose
2875 address is being taken. Likewise for unchanging. */
2876 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2877 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2879 PUT_CODE (reg, MEM);
2880 MEM_ATTRS (reg) = 0;
2881 XEXP (reg, 0) = r;
2883 if (decl)
2885 tree type = TREE_TYPE (decl);
2886 enum machine_mode decl_mode
2887 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2888 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2889 : DECL_RTL_IF_SET (decl));
2891 PUT_MODE (reg, decl_mode);
2893 /* Clear DECL_RTL momentarily so functions below will work
2894 properly, then set it again. */
2895 if (DECL_P (decl) && decl_rtl == reg)
2896 SET_DECL_RTL (decl, 0);
2898 set_mem_attributes (reg, decl, 1);
2899 set_mem_alias_set (reg, set);
2901 if (DECL_P (decl) && decl_rtl == reg)
2902 SET_DECL_RTL (decl, reg);
2904 if (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0))
2905 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2907 else
2908 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2910 return reg;
2913 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2915 void
2916 flush_addressof (decl)
2917 tree decl;
2919 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2920 && DECL_RTL (decl) != 0
2921 && GET_CODE (DECL_RTL (decl)) == MEM
2922 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2923 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2924 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2927 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2929 static void
2930 put_addressof_into_stack (r, ht)
2931 rtx r;
2932 htab_t ht;
2934 tree decl, type;
2935 int volatile_p, used_p;
2937 rtx reg = XEXP (r, 0);
2939 if (GET_CODE (reg) != REG)
2940 abort ();
2942 decl = ADDRESSOF_DECL (r);
2943 if (decl)
2945 type = TREE_TYPE (decl);
2946 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2947 && TREE_THIS_VOLATILE (decl));
2948 used_p = (TREE_USED (decl)
2949 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2951 else
2953 type = NULL_TREE;
2954 volatile_p = 0;
2955 used_p = 1;
2958 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2959 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2962 /* List of replacements made below in purge_addressof_1 when creating
2963 bitfield insertions. */
2964 static rtx purge_bitfield_addressof_replacements;
2966 /* List of replacements made below in purge_addressof_1 for patterns
2967 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2968 corresponding (ADDRESSOF (REG ...)) and the value is a substitution
2969 for the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS
2970 alone is not enough in complex cases, e.g. when some field values can
2971 be extracted by using a MEM with a narrower mode. */
2972 static rtx purge_addressof_replacements;
2974 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2975 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2976 the stack. If the function returns FALSE then the replacement could not
2977 be made. */
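/* Concretely: (mem (addressof (reg R) ...)) is normally replaced by
   the pseudo R itself (suitably adjusted if the modes differ), while
   a bare ADDRESSOF that is really used as an address forces R into a
   stack slot and is replaced by that slot's address.  */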
2979 static bool
2980 purge_addressof_1 (loc, insn, force, store, ht)
2981 rtx *loc;
2982 rtx insn;
2983 int force, store;
2984 htab_t ht;
2986 rtx x;
2987 RTX_CODE code;
2988 int i, j;
2989 const char *fmt;
2990 bool result = true;
2992 /* Re-start here to avoid recursion in common cases. */
2993 restart:
2995 x = *loc;
2996 if (x == 0)
2997 return true;
2999 code = GET_CODE (x);
3001 /* If we don't return in any of the cases below, we will recurse inside
3002 the RTX, which will normally result in any ADDRESSOF being forced into
3003 memory. */
3004 if (code == SET)
3006 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3007 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3008 return result;
3010 else if (code == ADDRESSOF)
3012 rtx sub, insns;
3014 if (GET_CODE (XEXP (x, 0)) != MEM)
3015 put_addressof_into_stack (x, ht);
3017 /* We must create a copy of the rtx because it was created by
3018 overwriting a REG rtx which is always shared. */
3019 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3020 if (validate_change (insn, loc, sub, 0)
3021 || validate_replace_rtx (x, sub, insn))
3022 return true;
3024 start_sequence ();
3025 sub = force_operand (sub, NULL_RTX);
3026 if (! validate_change (insn, loc, sub, 0)
3027 && ! validate_replace_rtx (x, sub, insn))
3028 abort ();
3030 insns = get_insns ();
3031 end_sequence ();
3032 emit_insn_before (insns, insn);
3033 return true;
3036 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3038 rtx sub = XEXP (XEXP (x, 0), 0);
3040 if (GET_CODE (sub) == MEM)
3041 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3042 else if (GET_CODE (sub) == REG
3043 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3045 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3047 int size_x, size_sub;
3049 if (!insn)
3051 /* When processing REG_NOTES look at the list of
3052 replacements done on the insn to find the register that X
3053 was replaced by. */
3054 rtx tem;
3056 for (tem = purge_bitfield_addressof_replacements;
3057 tem != NULL_RTX;
3058 tem = XEXP (XEXP (tem, 1), 1))
3059 if (rtx_equal_p (x, XEXP (tem, 0)))
3061 *loc = XEXP (XEXP (tem, 1), 0);
3062 return true;
3065 /* See comment for purge_addressof_replacements. */
3066 for (tem = purge_addressof_replacements;
3067 tem != NULL_RTX;
3068 tem = XEXP (XEXP (tem, 1), 1))
3069 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3071 rtx z = XEXP (XEXP (tem, 1), 0);
3073 if (GET_MODE (x) == GET_MODE (z)
3074 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3075 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3076 abort ();
3078 /* It can happen that the note may speak of things
3079 in a wider (or just different) mode than the
3080 code did. This is especially true of
3081 REG_RETVAL. */
3083 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3084 z = SUBREG_REG (z);
3086 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3087 && (GET_MODE_SIZE (GET_MODE (x))
3088 > GET_MODE_SIZE (GET_MODE (z))))
3090 /* This can occur as a result of invalid
3091 pointer casts, e.g. float f; ...
3092 *(long long int *)&f.
3093 ??? We could emit a warning here, but
3094 without a line number that wouldn't be
3095 very helpful. */
3096 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3098 else
3099 z = gen_lowpart (GET_MODE (x), z);
3101 *loc = z;
3102 return true;
3105 /* Sometimes we may not be able to find the replacement. For
3106 example when the original insn was a MEM in a wider mode,
3107 and the note is part of a sign extension of a narrowed
3108 version of that MEM. Gcc testcase compile/990829-1.c can
3109 generate an example of this situation. Rather than complain,
3110 we return false, which will prompt our caller to remove the
3111 offending note. */
3112 return false;
3115 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3116 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3118 /* Don't even consider working with paradoxical subregs,
3119 or the moral equivalent seen here. */
3120 if (size_x <= size_sub
3121 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3123 /* Do a bitfield insertion to mirror what would happen
3124 in memory. */
3126 rtx val, seq;
3128 if (store)
3130 rtx p = PREV_INSN (insn);
3132 start_sequence ();
3133 val = gen_reg_rtx (GET_MODE (x));
3134 if (! validate_change (insn, loc, val, 0))
3136 /* Discard the current sequence and put the
3137 ADDRESSOF on stack. */
3138 end_sequence ();
3139 goto give_up;
3141 seq = get_insns ();
3142 end_sequence ();
3143 emit_insn_before (seq, insn);
3144 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3145 insn, ht);
3147 start_sequence ();
3148 store_bit_field (sub, size_x, 0, GET_MODE (x),
3149 val, GET_MODE_SIZE (GET_MODE (sub)));
3151 /* Make sure to unshare any shared rtl that store_bit_field
3152 might have created. */
3153 unshare_all_rtl_again (get_insns ());
3155 seq = get_insns ();
3156 end_sequence ();
3157 p = emit_insn_after (seq, insn);
3158 if (NEXT_INSN (insn))
3159 compute_insns_for_mem (NEXT_INSN (insn),
3160 p ? NEXT_INSN (p) : NULL_RTX,
3161 ht);
3163 else
3165 rtx p = PREV_INSN (insn);
3167 start_sequence ();
3168 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3169 GET_MODE (x), GET_MODE (x),
3170 GET_MODE_SIZE (GET_MODE (sub)));
3172 if (! validate_change (insn, loc, val, 0))
3174 /* Discard the current sequence and put the
3175 ADDRESSOF on stack. */
3176 end_sequence ();
3177 goto give_up;
3180 seq = get_insns ();
3181 end_sequence ();
3182 emit_insn_before (seq, insn);
3183 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3184 insn, ht);
3187 /* Remember the replacement so that the same one can be done
3188 on the REG_NOTES. */
3189 purge_bitfield_addressof_replacements
3190 = gen_rtx_EXPR_LIST (VOIDmode, x,
3191 gen_rtx_EXPR_LIST
3192 (VOIDmode, val,
3193 purge_bitfield_addressof_replacements));
3195 /* We replaced with a reg -- all done. */
3196 return true;
3200 else if (validate_change (insn, loc, sub, 0))
3202 /* Remember the replacement so that the same one can be done
3203 on the REG_NOTES. */
3204 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3206 rtx tem;
3208 for (tem = purge_addressof_replacements;
3209 tem != NULL_RTX;
3210 tem = XEXP (XEXP (tem, 1), 1))
3211 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3213 XEXP (XEXP (tem, 1), 0) = sub;
3214 return true;
3216 purge_addressof_replacements
3217 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3218 gen_rtx_EXPR_LIST (VOIDmode, sub,
3219 purge_addressof_replacements));
3220 return true;
3222 goto restart;
3226 give_up:
3227 /* Scan all subexpressions. */
3228 fmt = GET_RTX_FORMAT (code);
3229 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3231 if (*fmt == 'e')
3232 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3233 else if (*fmt == 'E')
3234 for (j = 0; j < XVECLEN (x, i); j++)
3235 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3238 return result;
3241 /* Return a hash value for K, an insns_for_mem_entry keyed by a REG. */
3243 static hashval_t
3244 insns_for_mem_hash (k)
3245 const void * k;
3247 /* Use the address of the key for the hash value. */
3248 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3249 return htab_hash_pointer (m->key);
3252 /* Return nonzero if K1 and K2 (two insns_for_mem_entries) have the same key REG. */
3254 static int
3255 insns_for_mem_comp (k1, k2)
3256 const void * k1;
3257 const void * k2;
3259 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3260 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3261 return m1->key == m2->key;
3264 struct insns_for_mem_walk_info
3266 /* The hash table that we are using to record which INSNs use which
3267 MEMs. */
3268 htab_t ht;
3270 /* The INSN we are currently processing. */
3271 rtx insn;
3273 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3274 to find the insns that use the REGs in the ADDRESSOFs. */
3275 int pass;
3278 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3279 that might be used in an ADDRESSOF expression, record this INSN in
3280 the hash table given by DATA (which is really a pointer to an
3281 insns_for_mem_walk_info structure). */
3283 static int
3284 insns_for_mem_walk (r, data)
3285 rtx *r;
3286 void *data;
3288 struct insns_for_mem_walk_info *ifmwi
3289 = (struct insns_for_mem_walk_info *) data;
3290 struct insns_for_mem_entry tmp;
3291 tmp.insns = NULL_RTX;
3293 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3294 && GET_CODE (XEXP (*r, 0)) == REG)
3296 PTR *e;
3297 tmp.key = XEXP (*r, 0);
3298 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3299 if (*e == NULL)
3301 *e = ggc_alloc (sizeof (tmp));
3302 memcpy (*e, &tmp, sizeof (tmp));
3305 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3307 struct insns_for_mem_entry *ifme;
3308 tmp.key = *r;
3309 ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3311 /* If we have not already recorded this INSN, do so now. Since
3312 we process the INSNs in order, we know that if we have
3313 recorded it, it must be at the front of the list. */
3314 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3315 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3316 ifme->insns);
3319 return 0;
3322 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3323 which REGs in HT. */
3325 static void
3326 compute_insns_for_mem (insns, last_insn, ht)
3327 rtx insns;
3328 rtx last_insn;
3329 htab_t ht;
3331 rtx insn;
3332 struct insns_for_mem_walk_info ifmwi;
3333 ifmwi.ht = ht;
3335 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3336 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3337 if (INSN_P (insn))
3339 ifmwi.insn = insn;
3340 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3344 /* Helper function for purge_addressof called through for_each_rtx.
3345 Returns true iff the rtl is an ADDRESSOF. */
3347 static int
3348 is_addressof (rtl, data)
3349 rtx *rtl;
3350 void *data ATTRIBUTE_UNUSED;
3352 return GET_CODE (*rtl) == ADDRESSOF;
3355 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3356 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3357 stack. */
3359 void
3360 purge_addressof (insns)
3361 rtx insns;
3363 rtx insn;
3364 htab_t ht;
3366 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3367 requires a fixup pass over the instruction stream to correct
3368 INSNs that depended on the REG being a REG, and not a MEM. But,
3369 these fixup passes are slow. Furthermore, most MEMs are not
3370 mentioned in very many instructions. So, we speed up the process
3371 by pre-calculating which REGs occur in which INSNs; that allows
3372 us to perform the fixup passes much more quickly. */
3373 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3374 compute_insns_for_mem (insns, NULL_RTX, ht);
3376 for (insn = insns; insn; insn = NEXT_INSN (insn))
3377 if (INSN_P (insn))
3379 if (! purge_addressof_1 (&PATTERN (insn), insn,
3380 asm_noperands (PATTERN (insn)) > 0, 0, ht))
3381 /* If we could not replace the ADDRESSOFs in the insn,
3382 something is wrong. */
3383 abort ();
3385 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, ht))
3387 /* If we could not replace the ADDRESSOFs in the insn's notes,
3388 we can just remove the offending notes instead. */
3389 rtx note;
3391 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3393 /* If we find a REG_RETVAL note then the insn is a libcall.
3394 Such insns must have REG_EQUAL notes as well, in order
3395 for later passes of the compiler to work. So it is not
3396 safe to delete the notes here, and instead we abort. */
3397 if (REG_NOTE_KIND (note) == REG_RETVAL)
3398 abort ();
3399 if (for_each_rtx (&note, is_addressof, NULL))
3400 remove_note (insn, note);
3405 /* Clean up. */
3406 purge_bitfield_addressof_replacements = 0;
3407 purge_addressof_replacements = 0;
3409 /* REGs are shared. purge_addressof will destructively replace a REG
3410 with a MEM, which creates shared MEMs.
3412 Unfortunately, the children of put_reg_into_stack assume that MEMs
3413 referring to the same stack slot are shared (fixup_var_refs and
3414 the associated hash table code).
3416 So, we have to do another unsharing pass after we have flushed any
3417 REGs that had their address taken into the stack.
3419 It may be worth tracking whether or not we converted any REGs into
3420 MEMs to avoid this overhead when it is not needed. */
3421 unshare_all_rtl_again (get_insns ());
3424 /* Convert a SET of a hard subreg to a set of the appropriate hard
3425 register. A subroutine of purge_hard_subreg_sets. */
3427 static void
3428 purge_single_hard_subreg_set (pattern)
3429 rtx pattern;
3431 rtx reg = SET_DEST (pattern);
3432 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3433 int offset = 0;
3435 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3436 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3438 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3439 GET_MODE (SUBREG_REG (reg)),
3440 SUBREG_BYTE (reg),
3441 GET_MODE (reg));
3442 reg = SUBREG_REG (reg);
3446 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3448 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3449 SET_DEST (pattern) = reg;
3453 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3454 only such SETs that we expect to see are those left in because
3455 integrate can't handle sets of parts of a return value register.
3457 We don't use alter_subreg because we only want to eliminate subregs
3458 of hard registers. */
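/* Illustrative example (register numbers invented), on a 32-bit
   little-endian target:
     (set (subreg:SI (reg:DI 0) 4) (reg:SI 42))
   becomes
     (set (reg:SI 1) (reg:SI 42))
   since byte 4 of the DImode pair maps to the second hard register.  */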
3460 void
3461 purge_hard_subreg_sets (insn)
3462 rtx insn;
3464 for (; insn; insn = NEXT_INSN (insn))
3466 if (INSN_P (insn))
3468 rtx pattern = PATTERN (insn);
3469 switch (GET_CODE (pattern))
3471 case SET:
3472 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3473 purge_single_hard_subreg_set (pattern);
3474 break;
3475 case PARALLEL:
3477 int j;
3478 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3480 rtx inner_pattern = XVECEXP (pattern, 0, j);
3481 if (GET_CODE (inner_pattern) == SET
3482 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3483 purge_single_hard_subreg_set (inner_pattern);
3486 break;
3487 default:
3488 break;
3494 /* Pass through the INSNS of function FNDECL and convert virtual register
3495 references to hard register references. */
3497 void
3498 instantiate_virtual_regs (fndecl, insns)
3499 tree fndecl;
3500 rtx insns;
3502 rtx insn;
3503 unsigned int i;
3505 /* Compute the offsets to use for this function. */
3506 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3507 var_offset = STARTING_FRAME_OFFSET;
3508 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3509 out_arg_offset = STACK_POINTER_OFFSET;
3510 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3512 /* Scan all variables and parameters of this function. For each that is
3513 in memory, instantiate all virtual registers if the result is a valid
3514 address. If not, we do it later. That will handle most uses of virtual
3515 regs on many machines. */
3516 instantiate_decls (fndecl, 1);
3518 /* Initialize recognition, indicating that volatile is OK. */
3519 init_recog ();
3521 /* Scan through all the insns, instantiating every virtual register still
3522 present. */
3523 for (insn = insns; insn; insn = NEXT_INSN (insn))
3524 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3525 || GET_CODE (insn) == CALL_INSN)
3527 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3528 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3529 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3530 if (GET_CODE (insn) == CALL_INSN)
3531 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3532 NULL_RTX, 0);
3535 /* Instantiate the stack slots for the parm registers, for later use in
3536 addressof elimination. */
3537 for (i = 0; i < max_parm_reg; ++i)
3538 if (parm_reg_stack_loc[i])
3539 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3541 /* Now instantiate the remaining register equivalences for debugging info.
3542 These will not be valid addresses. */
3543 instantiate_decls (fndecl, 0);
3545 /* Indicate that, from now on, assign_stack_local should use
3546 frame_pointer_rtx. */
3547 virtuals_instantiated = 1;
3550 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3551 all virtual registers in their DECL_RTL's.
3553 If VALID_ONLY, do this only if the resulting address is still valid.
3554 Otherwise, always do it. */
3556 static void
3557 instantiate_decls (fndecl, valid_only)
3558 tree fndecl;
3559 int valid_only;
3561 tree decl;
3563 /* Process all parameters of the function. */
3564 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3566 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3567 HOST_WIDE_INT size_rtl;
3569 instantiate_decl (DECL_RTL (decl), size, valid_only);
3571 /* If the parameter was promoted, then the incoming RTL mode may be
3572 larger than the declared type size. We must use the larger of
3573 the two sizes. */
3574 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3575 size = MAX (size_rtl, size);
3576 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3579 /* Now process all variables defined in the function or its subblocks. */
3580 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3583 /* Subroutine of instantiate_decls: Process all decls in the given
3584 BLOCK node and all its subblocks. */
3586 static void
3587 instantiate_decls_1 (let, valid_only)
3588 tree let;
3589 int valid_only;
3591 tree t;
3593 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3594 if (DECL_RTL_SET_P (t))
3595 instantiate_decl (DECL_RTL (t),
3596 int_size_in_bytes (TREE_TYPE (t)),
3597 valid_only);
3599 /* Process all subblocks. */
3600 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3601 instantiate_decls_1 (t, valid_only);
3604 /* Subroutine of the preceding procedures: Given RTL representing a
3605 decl and the size of the object, do any instantiation required.
3607 If VALID_ONLY is nonzero, it means that the RTL should only be
3608 changed if the new address is valid. */
3610 static void
3611 instantiate_decl (x, size, valid_only)
3612 rtx x;
3613 HOST_WIDE_INT size;
3614 int valid_only;
3616 enum machine_mode mode;
3617 rtx addr;
3619 /* If this is not a MEM, no need to do anything. Similarly if the
3620 address is a constant or a register that is not a virtual register. */
3622 if (x == 0 || GET_CODE (x) != MEM)
3623 return;
3625 addr = XEXP (x, 0);
3626 if (CONSTANT_P (addr)
3627 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3628 || (GET_CODE (addr) == REG
3629 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3630 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3631 return;
3633 /* If we should only do this if the address is valid, copy the address.
3634 We need to do this so we can undo any changes that might make the
3635 address invalid. This copy is unfortunate, but probably can't be
3636 avoided. */
3638 if (valid_only)
3639 addr = copy_rtx (addr);
3641 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3643 if (valid_only && size >= 0)
3645 unsigned HOST_WIDE_INT decl_size = size;
3647 /* Now verify that the resulting address is valid for every integer or
3648 floating-point mode up to and including SIZE bytes long. We do this
3649 since the object might be accessed in any mode and frame addresses
3650 are shared. */
3652 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3653 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3654 mode = GET_MODE_WIDER_MODE (mode))
3655 if (! memory_address_p (mode, addr))
3656 return;
3658 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3659 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3660 mode = GET_MODE_WIDER_MODE (mode))
3661 if (! memory_address_p (mode, addr))
3662 return;
3665 /* Put back the address now that we have updated it and we either know
3666 it is valid or we don't care whether it is valid. */
3668 XEXP (x, 0) = addr;
3671 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3672 is a virtual register, return the equivalent hard register and set the
3673 offset indirectly through the pointer. Otherwise, return 0. */
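/* As an editorial summary, the mapping performed below is:

       virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
       virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
       virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
       virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset
       virtual_cfa_rtx            -> arg_pointer_rtx   + cfa_offset  */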
3675 static rtx
3676 instantiate_new_reg (x, poffset)
3677 rtx x;
3678 HOST_WIDE_INT *poffset;
3680 rtx new;
3681 HOST_WIDE_INT offset;
3683 if (x == virtual_incoming_args_rtx)
3684 new = arg_pointer_rtx, offset = in_arg_offset;
3685 else if (x == virtual_stack_vars_rtx)
3686 new = frame_pointer_rtx, offset = var_offset;
3687 else if (x == virtual_stack_dynamic_rtx)
3688 new = stack_pointer_rtx, offset = dynamic_offset;
3689 else if (x == virtual_outgoing_args_rtx)
3690 new = stack_pointer_rtx, offset = out_arg_offset;
3691 else if (x == virtual_cfa_rtx)
3692 new = arg_pointer_rtx, offset = cfa_offset;
3693 else
3694 return 0;
3696 *poffset = offset;
3697 return new;
3700 /* Given a pointer to a piece of rtx and an optional pointer to the
3701 containing object, instantiate any virtual registers present in it.
3703 If EXTRA_INSNS is nonzero, we always do the replacement and generate
3704 any extra insns before OBJECT. If it is zero, we do nothing if the
3705 replacement is not valid.
3707 Return 1 if we either had nothing to do or if we were able to do the
3708 needed replacement. Return 0 otherwise; we only return zero if
3709 EXTRA_INSNS is zero.
3711 We first try some simple transformations to avoid the creation of extra
3712 pseudos. */
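/* A sketch of the common case, with illustrative numbers only:
   assuming var_offset is -16, a reference such as

       (mem:SI (plus:SI (reg virtual-stack-vars) (const_int 8)))

   is rewritten in place to

       (mem:SI (plus:SI (reg frame-pointer) (const_int -8)))

   when the new address is valid.  When it is not and EXTRA_INSNS is
   nonzero, the sum is instead computed into a fresh pseudo by insns
   emitted before OBJECT, and the pseudo replaces the expression.  */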
3714 static int
3715 instantiate_virtual_regs_1 (loc, object, extra_insns)
3716 rtx *loc;
3717 rtx object;
3718 int extra_insns;
3720 rtx x;
3721 RTX_CODE code;
3722 rtx new = 0;
3723 HOST_WIDE_INT offset = 0;
3724 rtx temp;
3725 rtx seq;
3726 int i, j;
3727 const char *fmt;
3729 /* Re-start here to avoid recursion in common cases. */
3730 restart:
3732 x = *loc;
3733 if (x == 0)
3734 return 1;
3736 code = GET_CODE (x);
3738 /* Check for some special cases. */
3739 switch (code)
3741 case CONST_INT:
3742 case CONST_DOUBLE:
3743 case CONST_VECTOR:
3744 case CONST:
3745 case SYMBOL_REF:
3746 case CODE_LABEL:
3747 case PC:
3748 case CC0:
3749 case ASM_INPUT:
3750 case ADDR_VEC:
3751 case ADDR_DIFF_VEC:
3752 case RETURN:
3753 return 1;
3755 case SET:
3756 /* We are allowed to set the virtual registers. This means that
3757 the actual register should receive the source minus the
3758 appropriate offset. This is used, for example, in the handling
3759 of non-local gotos. */
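/* Illustrative example: given (set (reg virtual-stack-vars) (reg R))
   and var_offset == 16, we emit insns computing R - 16 into the
   frame pointer, so that later uses, which add the 16 back,
   reconstruct R.  */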
3760 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3762 rtx src = SET_SRC (x);
3764 /* We are setting the register, not using it, so the relevant
3765 offset is the negative of the offset we would use if we were
3766 reading the register. */
3767 offset = - offset;
3768 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3770 /* The only valid sources here are PLUS or REG. Just do
3771 the simplest possible thing to handle them. */
3772 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3773 abort ();
3775 start_sequence ();
3776 if (GET_CODE (src) != REG)
3777 temp = force_operand (src, NULL_RTX);
3778 else
3779 temp = src;
3780 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3781 seq = get_insns ();
3782 end_sequence ();
3784 emit_insn_before (seq, object);
3785 SET_DEST (x) = new;
3787 if (! validate_change (object, &SET_SRC (x), temp, 0)
3788 || ! extra_insns)
3789 abort ();
3791 return 1;
3794 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3795 loc = &SET_SRC (x);
3796 goto restart;
3798 case PLUS:
3799 /* Handle special case of virtual register plus constant. */
3800 if (CONSTANT_P (XEXP (x, 1)))
3802 rtx old, new_offset;
3804 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3805 if (GET_CODE (XEXP (x, 0)) == PLUS)
3807 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3809 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3810 extra_insns);
3811 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3813 else
3815 loc = &XEXP (x, 0);
3816 goto restart;
3820 #ifdef POINTERS_EXTEND_UNSIGNED
3821 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3822 we can commute the PLUS and SUBREG because pointers into the
3823 frame are well-behaved. */
3824 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3825 && GET_CODE (XEXP (x, 1)) == CONST_INT
3826 && 0 != (new
3827 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3828 &offset))
3829 && validate_change (object, loc,
3830 plus_constant (gen_lowpart (ptr_mode,
3831 new),
3832 offset
3833 + INTVAL (XEXP (x, 1))),
3835 return 1;
3836 #endif
3837 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3839 /* We know the second operand is a constant. Unless the
3840 first operand is a REG (which has already been checked),
3841 it needs to be checked. */
3842 if (GET_CODE (XEXP (x, 0)) != REG)
3844 loc = &XEXP (x, 0);
3845 goto restart;
3847 return 1;
3850 new_offset = plus_constant (XEXP (x, 1), offset);
3852 /* If the new constant is zero, try to replace the sum with just
3853 the register. */
3854 if (new_offset == const0_rtx
3855 && validate_change (object, loc, new, 0))
3856 return 1;
3858 /* Next try to replace the register and new offset.
3859 There are two changes to validate here, and we can't assume that
3860 even when the old offset equals the new one, just changing the
3861 register will yield a valid insn. In the interests of a little
3862 efficiency, however, we only call validate_change once (we don't
3863 queue up the changes and then call apply_change_group). */
3865 old = XEXP (x, 0);
3866 if (offset == 0
3867 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3868 : (XEXP (x, 0) = new,
3869 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3871 if (! extra_insns)
3873 XEXP (x, 0) = old;
3874 return 0;
3877 /* Otherwise copy the new constant into a register and replace
3878 constant with that register. */
3879 temp = gen_reg_rtx (Pmode);
3880 XEXP (x, 0) = new;
3881 if (validate_change (object, &XEXP (x, 1), temp, 0))
3882 emit_insn_before (gen_move_insn (temp, new_offset), object);
3883 else
3885 /* If that didn't work, replace this expression with a
3886 register containing the sum. */
3888 XEXP (x, 0) = old;
3889 new = gen_rtx_PLUS (Pmode, new, new_offset);
3891 start_sequence ();
3892 temp = force_operand (new, NULL_RTX);
3893 seq = get_insns ();
3894 end_sequence ();
3896 emit_insn_before (seq, object);
3897 if (! validate_change (object, loc, temp, 0)
3898 && ! validate_replace_rtx (x, temp, object))
3899 abort ();
3903 return 1;
3906 /* Fall through to generic two-operand expression case. */
3907 case EXPR_LIST:
3908 case CALL:
3909 case COMPARE:
3910 case MINUS:
3911 case MULT:
3912 case DIV: case UDIV:
3913 case MOD: case UMOD:
3914 case AND: case IOR: case XOR:
3915 case ROTATERT: case ROTATE:
3916 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3917 case NE: case EQ:
3918 case GE: case GT: case GEU: case GTU:
3919 case LE: case LT: case LEU: case LTU:
3920 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3921 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3922 loc = &XEXP (x, 0);
3923 goto restart;
3925 case MEM:
3926 /* Most cases of MEM that convert to valid addresses have already been
3927 handled by our scan of decls. The only special handling we
3928 need here is to make a copy of the rtx to ensure it isn't being
3929 shared if we have to change it to a pseudo.
3931 If the rtx is a simple reference to an address via a virtual register,
3932 it can potentially be shared. In such cases, first try to make it
3933 a valid address, which can also be shared. Otherwise, copy it and
3934 proceed normally.
3936 First check for common cases that need no processing. These are
3937 usually due to instantiation already being done on a previous instance
3938 of a shared rtx. */
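/* E.g. the DECL_RTL of a local variable is typically one MEM shared
   by every insn referencing the variable; substituting in place
   (done below only while the address stays valid) fixes all users
   at once, whereas copying first would fix only this use.  */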
3940 temp = XEXP (x, 0);
3941 if (CONSTANT_ADDRESS_P (temp)
3942 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3943 || temp == arg_pointer_rtx
3944 #endif
3945 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3946 || temp == hard_frame_pointer_rtx
3947 #endif
3948 || temp == frame_pointer_rtx)
3949 return 1;
3951 if (GET_CODE (temp) == PLUS
3952 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3953 && (XEXP (temp, 0) == frame_pointer_rtx
3954 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3955 || XEXP (temp, 0) == hard_frame_pointer_rtx
3956 #endif
3957 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3958 || XEXP (temp, 0) == arg_pointer_rtx
3959 #endif
3961 return 1;
3963 if (temp == virtual_stack_vars_rtx
3964 || temp == virtual_incoming_args_rtx
3965 || (GET_CODE (temp) == PLUS
3966 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3967 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3968 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3970 /* This MEM may be shared. If the substitution can be done without
3971 the need to generate new pseudos, we want to do it in place
3972 so all copies of the shared rtx benefit. The call below will
3973 only make substitutions if the resulting address is still
3974 valid.
3976 Note that we cannot pass X as the object in the recursive call
3977 since the insn being processed may not allow all valid
3978 addresses. However, if we were not passed an object, we can
3979 only modify X without copying it if X will have a valid
3980 address.
3982 ??? Also note that this can still lose if OBJECT is an insn that
3983 has fewer restrictions on an address than some other insn.
3984 In that case, we will modify the shared address. This case
3985 doesn't seem very likely, though. One case where this could
3986 happen is in the case of a USE or CLOBBER reference, but we
3987 take care of that below. */
3989 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3990 object ? object : x, 0))
3991 return 1;
3993 /* Otherwise make a copy and process that copy. We copy the entire
3994 RTL expression since it might be a PLUS which could also be
3995 shared. */
3996 *loc = x = copy_rtx (x);
3999 /* Fall through to generic unary operation case. */
4000 case PREFETCH:
4001 case SUBREG:
4002 case STRICT_LOW_PART:
4003 case NEG: case NOT:
4004 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4005 case SIGN_EXTEND: case ZERO_EXTEND:
4006 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4007 case FLOAT: case FIX:
4008 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4009 case ABS:
4010 case SQRT:
4011 case FFS:
4012 case CLZ: case CTZ:
4013 case POPCOUNT: case PARITY:
4014 /* These cases either have just one operand, or we know that we need not
4015 check the rest of the operands. */
4016 loc = &XEXP (x, 0);
4017 goto restart;
4019 case USE:
4020 case CLOBBER:
4021 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4022 go ahead and make the invalid one, but do it to a copy. For a REG,
4023 just make the recursive call, since there's no chance of a problem. */
4025 if ((GET_CODE (XEXP (x, 0)) == MEM
4026 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4028 || (GET_CODE (XEXP (x, 0)) == REG
4029 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4030 return 1;
4032 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4033 loc = &XEXP (x, 0);
4034 goto restart;
4036 case REG:
4037 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4038 in front of this insn and substitute the temporary. */
4039 if ((new = instantiate_new_reg (x, &offset)) != 0)
4041 temp = plus_constant (new, offset);
4042 if (!validate_change (object, loc, temp, 0))
4044 if (! extra_insns)
4045 return 0;
4047 start_sequence ();
4048 temp = force_operand (temp, NULL_RTX);
4049 seq = get_insns ();
4050 end_sequence ();
4052 emit_insn_before (seq, object);
4053 if (! validate_change (object, loc, temp, 0)
4054 && ! validate_replace_rtx (x, temp, object))
4055 abort ();
4059 return 1;
4061 case ADDRESSOF:
4062 if (GET_CODE (XEXP (x, 0)) == REG)
4063 return 1;
4065 else if (GET_CODE (XEXP (x, 0)) == MEM)
4067 /* If we have a (addressof (mem ..)), do any instantiation inside
4068 since we know we'll be making the inside valid when we finally
4069 remove the ADDRESSOF. */
4070 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4071 return 1;
4073 break;
4075 default:
4076 break;
4079 /* Scan all subexpressions. */
4080 fmt = GET_RTX_FORMAT (code);
4081 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4082 if (*fmt == 'e')
4084 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4085 return 0;
4087 else if (*fmt == 'E')
4088 for (j = 0; j < XVECLEN (x, i); j++)
4089 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4090 extra_insns))
4091 return 0;
4093 return 1;
4096 /* Optimization: assuming this function does not receive nonlocal gotos,
4097 delete the handlers for such, as well as the insns to establish
4098 and disestablish them. */
4100 static void
4101 delete_handlers ()
4103 rtx insn;
4104 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4106 /* Delete the handler by turning off the flag that would
4107 prevent jump_optimize from deleting it.
4108 Also permit deletion of the nonlocal labels themselves
4109 if nothing local refers to them. */
4110 if (GET_CODE (insn) == CODE_LABEL)
4112 tree t, last_t;
4114 LABEL_PRESERVE_P (insn) = 0;
4116 /* Remove it from the nonlocal_label list, to avoid confusing
4117 flow. */
4118 for (t = nonlocal_labels, last_t = 0; t;
4119 last_t = t, t = TREE_CHAIN (t))
4120 if (DECL_RTL (TREE_VALUE (t)) == insn)
4121 break;
4122 if (t)
4124 if (! last_t)
4125 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4126 else
4127 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4130 if (GET_CODE (insn) == INSN)
4132 int can_delete = 0;
4133 rtx t;
4134 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4135 if (reg_mentioned_p (t, PATTERN (insn)))
4137 can_delete = 1;
4138 break;
4140 if (can_delete
4141 || (nonlocal_goto_stack_level != 0
4142 && reg_mentioned_p (nonlocal_goto_stack_level,
4143 PATTERN (insn))))
4144 delete_related_insns (insn);
4149 /* Return the first insn following those generated by `assign_parms'. */
4152 get_first_nonparm_insn ()
4154 if (last_parm_insn)
4155 return NEXT_INSN (last_parm_insn);
4156 return get_insns ();
4159 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4160 This means a type for which function calls must pass an address to the
4161 function or get an address back from the function.
4162 EXP may be a type node or an expression (whose type is tested). */
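/* For example (target-dependent, so only an illustration): with
   -fpcc-struct-return, or when RETURN_IN_MEMORY so dictates, a type such
   as `struct { char c[64]; }' is returned in memory -- the caller passes
   the address of a temporary and we return 1 -- while a plain `int'
   normally comes back in a call-clobbered hard register and we return 0.  */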
4165 aggregate_value_p (exp)
4166 tree exp;
4168 int i, regno, nregs;
4169 rtx reg;
4171 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4173 if (TREE_CODE (type) == VOID_TYPE)
4174 return 0;
4175 if (RETURN_IN_MEMORY (type))
4176 return 1;
4177 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4178 and thus can't be returned in registers. */
4179 if (TREE_ADDRESSABLE (type))
4180 return 1;
4181 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4182 return 1;
4183 /* Make sure we have suitable call-clobbered regs to return
4184 the value in; if not, we must return it in memory. */
4185 reg = hard_function_value (type, 0, 0);
4187 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4188 it is OK. */
4189 if (GET_CODE (reg) != REG)
4190 return 0;
4192 regno = REGNO (reg);
4193 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4194 for (i = 0; i < nregs; i++)
4195 if (! call_used_regs[regno + i])
4196 return 1;
4197 return 0;
4200 /* Assign RTL expressions to the function's parameters.
4201 This may involve copying them into registers and using
4202 those registers as the RTL for them. */
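/* As an editorial overview (a gloss on the code, not original
   documentation), the loop below does roughly this for each parm:

   1. determine the modes in which the parm is passed and declared,
      handling promotion and pass-by-invisible-reference;
   2. ask FUNCTION_INCOMING_ARG (or FUNCTION_ARG) for the entry register,
      0 meaning the stack, and locate_and_pad_parm for the stack slot;
   3. store the parm for use in the function body in a pseudo register,
      in the incoming stack slot, or in a new local slot, queueing any
      mode conversions on CONVERSION_INSNS;
   4. record the results in DECL_RTL and DECL_INCOMING_RTL.  */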
4204 void
4205 assign_parms (fndecl)
4206 tree fndecl;
4208 tree parm;
4209 rtx entry_parm = 0;
4210 rtx stack_parm = 0;
4211 CUMULATIVE_ARGS args_so_far;
4212 enum machine_mode promoted_mode, passed_mode;
4213 enum machine_mode nominal_mode, promoted_nominal_mode;
4214 int unsignedp;
4215 /* Total space needed so far for args on the stack,
4216 given as a constant and a tree-expression. */
4217 struct args_size stack_args_size;
4218 tree fntype = TREE_TYPE (fndecl);
4219 tree fnargs = DECL_ARGUMENTS (fndecl);
4220 /* This is used for the arg pointer when referring to stack args. */
4221 rtx internal_arg_pointer;
4222 /* This is a dummy PARM_DECL that we used for the function result if
4223 the function returns a structure. */
4224 tree function_result_decl = 0;
4225 #ifdef SETUP_INCOMING_VARARGS
4226 int varargs_setup = 0;
4227 #endif
4228 rtx conversion_insns = 0;
4229 struct args_size alignment_pad;
4231 /* Nonzero if function takes extra anonymous args.
4232 This means the last named arg must be on the stack
4233 right before the anonymous ones. */
4234 int stdarg
4235 = (TYPE_ARG_TYPES (fntype) != 0
4236 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4237 != void_type_node));
4239 current_function_stdarg = stdarg;
4241 /* If the reg that the virtual arg pointer will be translated into is
4242 not a fixed reg or is the stack pointer, make a copy of the virtual
4243 arg pointer, and address parms via the copy. The frame pointer is
4244 considered fixed even though it is not marked as such.
4246 The second time through, simply use ap to avoid generating rtx. */
4248 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4249 || ! (fixed_regs[ARG_POINTER_REGNUM]
4250 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4251 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4252 else
4253 internal_arg_pointer = virtual_incoming_args_rtx;
4254 current_function_internal_arg_pointer = internal_arg_pointer;
4256 stack_args_size.constant = 0;
4257 stack_args_size.var = 0;
4259 /* If struct value address is treated as the first argument, make it so. */
4260 if (aggregate_value_p (DECL_RESULT (fndecl))
4261 && ! current_function_returns_pcc_struct
4262 && struct_value_incoming_rtx == 0)
4264 tree type = build_pointer_type (TREE_TYPE (fntype));
4266 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4268 DECL_ARG_TYPE (function_result_decl) = type;
4269 TREE_CHAIN (function_result_decl) = fnargs;
4270 fnargs = function_result_decl;
4273 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4274 parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4276 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4277 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4278 #else
4279 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl);
4280 #endif
4282 /* We haven't yet found an argument that we must push and pretend the
4283 caller did. */
4284 current_function_pretend_args_size = 0;
4286 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4288 struct args_size stack_offset;
4289 struct args_size arg_size;
4290 int passed_pointer = 0;
4291 int did_conversion = 0;
4292 tree passed_type = DECL_ARG_TYPE (parm);
4293 tree nominal_type = TREE_TYPE (parm);
4294 int pretend_named;
4295 int last_named = 0, named_arg;
4297 /* Set LAST_NAMED if this is last named arg before last
4298 anonymous args. */
4299 if (stdarg)
4301 tree tem;
4303 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4304 if (DECL_NAME (tem))
4305 break;
4307 if (tem == 0)
4308 last_named = 1;
4310 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4311 most machines, if this is a varargs/stdarg function, then we treat
4312 the last named arg as if it were anonymous too. */
4313 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4315 if (TREE_TYPE (parm) == error_mark_node
4316 /* This can happen after weird syntax errors
4317 or if an enum type is defined among the parms. */
4318 || TREE_CODE (parm) != PARM_DECL
4319 || passed_type == NULL)
4321 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4322 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4323 TREE_USED (parm) = 1;
4324 continue;
4327 /* Find mode of arg as it is passed, and mode of arg
4328 as it should be during execution of this function. */
4329 passed_mode = TYPE_MODE (passed_type);
4330 nominal_mode = TYPE_MODE (nominal_type);
4332 /* If the parm's mode is VOID, its value doesn't matter,
4333 so avoid the usual things like emit_move_insn that could crash.
4334 if (nominal_mode == VOIDmode)
4336 SET_DECL_RTL (parm, const0_rtx);
4337 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4338 continue;
4341 /* If the parm is to be passed as a transparent union, use the
4342 type of the first field for the tests below. We have already
4343 verified that the modes are the same. */
4344 if (DECL_TRANSPARENT_UNION (parm)
4345 || (TREE_CODE (passed_type) == UNION_TYPE
4346 && TYPE_TRANSPARENT_UNION (passed_type)))
4347 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4349 /* See if this arg was passed by invisible reference. It is if
4350 it is an object whose size depends on the contents of the
4351 object itself or if the machine requires these objects be passed
4352 that way. */
4354 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4355 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4356 || TREE_ADDRESSABLE (passed_type)
4357 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4358 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4359 passed_type, named_arg)
4360 #endif
4363 passed_type = nominal_type = build_pointer_type (passed_type);
4364 passed_pointer = 1;
4365 passed_mode = nominal_mode = Pmode;
4367 /* See if the frontend wants to pass this by invisible reference. */
4368 else if (passed_type != nominal_type
4369 && POINTER_TYPE_P (passed_type)
4370 && TREE_TYPE (passed_type) == nominal_type)
4372 nominal_type = passed_type;
4373 passed_pointer = 1;
4374 passed_mode = nominal_mode = Pmode;
4377 promoted_mode = passed_mode;
4379 #ifdef PROMOTE_FUNCTION_ARGS
4380 /* Compute the mode to which the arg is actually extended. */
4381 unsignedp = TREE_UNSIGNED (passed_type);
4382 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4383 #endif
4385 /* Let machine desc say which reg (if any) the parm arrives in.
4386 0 means it arrives on the stack. */
4387 #ifdef FUNCTION_INCOMING_ARG
4388 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4389 passed_type, named_arg);
4390 #else
4391 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4392 passed_type, named_arg);
4393 #endif
4395 if (entry_parm == 0)
4396 promoted_mode = passed_mode;
4398 #ifdef SETUP_INCOMING_VARARGS
4399 /* If this is the last named parameter, do any required setup for
4400 varargs or stdargs. We need to know about the case of this being an
4401 addressable type, in which case we skip the registers it
4402 would have arrived in.
4404 For stdargs, LAST_NAMED will be set for two parameters, the one that
4405 is actually the last named, and the dummy parameter. We only
4406 want to do this action once.
4408 Also, indicate when RTL generation is to be suppressed. */
4409 if (last_named && !varargs_setup)
4411 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4412 current_function_pretend_args_size, 0);
4413 varargs_setup = 1;
4415 #endif
4417 /* Determine parm's home in the stack,
4418 in case it arrives in the stack or we should pretend it did.
4420 Compute the stack position and rtx where the argument arrives
4421 and its size.
4423 There is one complexity here: If this was a parameter that would
4424 have been passed in registers, but wasn't only because it is
4425 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4426 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4427 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4428 0 as it was the previous time. */
4430 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4431 locate_and_pad_parm (promoted_mode, passed_type,
4432 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4434 #else
4435 #ifdef FUNCTION_INCOMING_ARG
4436 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4437 passed_type,
4438 pretend_named) != 0,
4439 #else
4440 FUNCTION_ARG (args_so_far, promoted_mode,
4441 passed_type,
4442 pretend_named) != 0,
4443 #endif
4444 #endif
4445 fndecl, &stack_args_size, &stack_offset, &arg_size,
4446 &alignment_pad);
4449 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4451 if (offset_rtx == const0_rtx)
4452 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4453 else
4454 stack_parm = gen_rtx_MEM (promoted_mode,
4455 gen_rtx_PLUS (Pmode,
4456 internal_arg_pointer,
4457 offset_rtx));
4459 set_mem_attributes (stack_parm, parm, 1);
4461 /* Also set REG_ATTRS if the parameter was passed in a register. */
4462 if (entry_parm)
4463 set_reg_attrs_for_parm (entry_parm, stack_parm);
4466 /* If this parameter was passed both in registers and in the stack,
4467 use the copy on the stack. */
4468 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4469 entry_parm = 0;
4471 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4472 /* If this parm was passed part in regs and part in memory,
4473 pretend it arrived entirely in memory
4474 by pushing the register-part onto the stack.
4476 In the special case of a DImode or DFmode that is split,
4477 we could put it together in a pseudoreg directly,
4478 but for now that's not worth bothering with. */
4480 if (entry_parm)
4482 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4483 passed_type, named_arg);
4485 if (nregs > 0)
4487 #if defined (REG_PARM_STACK_SPACE) && !defined (MAYBE_REG_PARM_STACK_SPACE)
4488 /* When REG_PARM_STACK_SPACE is nonzero, stack space for
4489 split parameters was allocated by our caller, so we
4490 won't be pushing it in the prolog. */
4491 if (REG_PARM_STACK_SPACE (fndecl) == 0)
4492 #endif
4493 current_function_pretend_args_size
4494 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4495 / (PARM_BOUNDARY / BITS_PER_UNIT)
4496 * (PARM_BOUNDARY / BITS_PER_UNIT));
4498 /* Handle calls that pass values in multiple non-contiguous
4499 locations. The Irix 6 ABI has examples of this. */
4500 if (GET_CODE (entry_parm) == PARALLEL)
4501 emit_group_store (validize_mem (stack_parm), entry_parm,
4502 int_size_in_bytes (TREE_TYPE (parm)));
4504 else
4505 move_block_from_reg (REGNO (entry_parm),
4506 validize_mem (stack_parm), nregs,
4507 int_size_in_bytes (TREE_TYPE (parm)));
4509 entry_parm = stack_parm;
4512 #endif
4514 /* If we didn't decide this parm came in a register,
4515 by default it came on the stack. */
4516 if (entry_parm == 0)
4517 entry_parm = stack_parm;
4519 /* Record permanently how this parm was passed. */
4520 DECL_INCOMING_RTL (parm) = entry_parm;
4522 /* If there is actually space on the stack for this parm,
4523 count it in stack_args_size; otherwise set stack_parm to 0
4524 to indicate there is no preallocated stack slot for the parm. */
4526 if (entry_parm == stack_parm
4527 || (GET_CODE (entry_parm) == PARALLEL
4528 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4529 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4530 /* On some machines, even if a parm value arrives in a register
4531 there is still an (uninitialized) stack slot allocated for it.
4533 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4534 whether this parameter already has a stack slot allocated,
4535 because an arg block exists only if current_function_args_size
4536 is larger than some threshold, and we haven't calculated that
4537 yet. So, for now, we just assume that stack slots never exist
4538 in this case. */
4539 || REG_PARM_STACK_SPACE (fndecl) > 0
4540 #endif
4543 stack_args_size.constant += arg_size.constant;
4544 if (arg_size.var)
4545 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4547 else
4548 /* No stack slot was pushed for this parm. */
4549 stack_parm = 0;
4551 /* Update info on where next arg arrives in registers. */
4553 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4554 passed_type, named_arg);
4556 /* If we can't trust the parm stack slot to be aligned enough
4557 for its ultimate type, don't use that slot after entry.
4558 We'll make another stack slot, if we need one. */
4560 unsigned int thisparm_boundary
4561 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4563 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4564 stack_parm = 0;
4567 /* If parm was passed in memory, and we need to convert it on entry,
4568 don't store it back in that same slot. */
4569 if (entry_parm != 0
4570 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4571 stack_parm = 0;
4573 /* When an argument is passed in multiple locations, we can't
4574 make use of this information, but we can save some copying if
4575 the whole argument is passed in a single register. */
4576 if (GET_CODE (entry_parm) == PARALLEL
4577 && nominal_mode != BLKmode && passed_mode != BLKmode)
4579 int i, len = XVECLEN (entry_parm, 0);
4581 for (i = 0; i < len; i++)
4582 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4583 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4584 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4585 == passed_mode)
4586 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4588 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4589 DECL_INCOMING_RTL (parm) = entry_parm;
4590 break;
4594 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4595 in the mode in which it arrives.
4596 STACK_PARM is an RTX for a stack slot where the parameter can live
4597 during the function (in case we want to put it there).
4598 STACK_PARM is 0 if no stack slot was pushed for it.
4600 Now output code if necessary to convert ENTRY_PARM to
4601 the type in which this function declares it,
4602 and store that result in an appropriate place,
4603 which may be a pseudo reg, may be STACK_PARM,
4604 or may be a local stack slot if STACK_PARM is 0.
4606 Set DECL_RTL to that place. */
4608 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4610 /* If a BLKmode arrives in registers, copy it to a stack slot.
4611 Handle calls that pass values in multiple non-contiguous
4612 locations. The Irix 6 ABI has examples of this. */
4613 if (GET_CODE (entry_parm) == REG
4614 || GET_CODE (entry_parm) == PARALLEL)
4616 int size_stored
4617 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4618 UNITS_PER_WORD);
4620 /* Note that we will be storing an integral number of words.
4621 So we have to be careful to ensure that we allocate an
4622 integral number of words. We do this below in the
4623 assign_stack_local if space was not allocated in the argument
4624 list. If it was, this will not work if PARM_BOUNDARY is not
4625 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4626 if it becomes a problem. */
4628 if (stack_parm == 0)
4630 stack_parm
4631 = assign_stack_local (GET_MODE (entry_parm),
4632 size_stored, 0);
4633 set_mem_attributes (stack_parm, parm, 1);
4636 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4637 abort ();
4639 /* Handle calls that pass values in multiple non-contiguous
4640 locations. The Irix 6 ABI has examples of this. */
4641 if (GET_CODE (entry_parm) == PARALLEL)
4642 emit_group_store (validize_mem (stack_parm), entry_parm,
4643 int_size_in_bytes (TREE_TYPE (parm)));
4644 else
4645 move_block_from_reg (REGNO (entry_parm),
4646 validize_mem (stack_parm),
4647 size_stored / UNITS_PER_WORD,
4648 int_size_in_bytes (TREE_TYPE (parm)));
4650 SET_DECL_RTL (parm, stack_parm);
4652 else if (! ((! optimize
4653 && ! DECL_REGISTER (parm))
4654 || TREE_SIDE_EFFECTS (parm)
4655 /* If -ffloat-store specified, don't put explicit
4656 float variables into registers. */
4657 || (flag_float_store
4658 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4659 /* Always assign pseudo to structure return or item passed
4660 by invisible reference. */
4661 || passed_pointer || parm == function_result_decl)
4663 /* Store the parm in a pseudoregister during the function, but we
4664 may need to do it in a wider mode. */
4666 rtx parmreg;
4667 unsigned int regno, regnoi = 0, regnor = 0;
4669 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4671 promoted_nominal_mode
4672 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4674 parmreg = gen_reg_rtx (promoted_nominal_mode);
4675 mark_user_reg (parmreg);
4677 /* If this was an item that we received a pointer to, set DECL_RTL
4678 appropriately. */
4679 if (passed_pointer)
4681 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4682 parmreg);
4683 set_mem_attributes (x, parm, 1);
4684 SET_DECL_RTL (parm, x);
4686 else
4688 SET_DECL_RTL (parm, parmreg);
4689 maybe_set_unchanging (DECL_RTL (parm), parm);
4692 /* Copy the value into the register. */
4693 if (nominal_mode != passed_mode
4694 || promoted_nominal_mode != promoted_mode)
4696 int save_tree_used;
4697 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4698 mode, by the caller. We now have to convert it to
4699 NOMINAL_MODE, if different. However, PARMREG may be in
4700 a different mode than NOMINAL_MODE if it is being stored
4701 promoted.
4703 If ENTRY_PARM is a hard register, it might be in a register
4704 not valid for operating in its mode (e.g., an odd-numbered
4705 register for a DFmode). In that case, moves are the only
4706 thing valid, so we can't do a convert from there. This
4707 occurs when the calling sequence allows such misaligned
4708 usages.
4710 In addition, the conversion may involve a call, which could
4711 clobber parameters which haven't been copied to pseudo
4712 registers yet. Therefore, we must first copy the parm to
4713 a pseudo reg here, and save the conversion until after all
4714 parameters have been moved. */
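/* Illustrative modes: under PROMOTE_FUNCTION_ARGS a HImode parm
   may arrive extended to SImode in a hard register; the SImode
   value is saved into a fresh pseudo right here, while the
   SImode-to-HImode conversion sits on CONVERSION_INSNS and is
   emitted only after every incoming register has been copied out.  */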
4716 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4718 emit_move_insn (tempreg, validize_mem (entry_parm));
4720 push_to_sequence (conversion_insns);
4721 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4723 if (GET_CODE (tempreg) == SUBREG
4724 && GET_MODE (tempreg) == nominal_mode
4725 && GET_CODE (SUBREG_REG (tempreg)) == REG
4726 && nominal_mode == passed_mode
4727 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4728 && GET_MODE_SIZE (GET_MODE (tempreg))
4729 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4731 /* The argument is already sign/zero extended, so note it
4732 into the subreg. */
4733 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4734 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4737 /* TREE_USED gets set erroneously during expand_assignment. */
4738 save_tree_used = TREE_USED (parm);
4739 expand_assignment (parm,
4740 make_tree (nominal_type, tempreg), 0, 0);
4741 TREE_USED (parm) = save_tree_used;
4742 conversion_insns = get_insns ();
4743 did_conversion = 1;
4744 end_sequence ();
4746 else
4747 emit_move_insn (parmreg, validize_mem (entry_parm));
4749 /* If we were passed a pointer but the actual value
4750 can safely live in a register, put it in one. */
4751 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4752 /* If by-reference argument was promoted, demote it. */
4753 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4754 || ! ((! optimize
4755 && ! DECL_REGISTER (parm))
4756 || TREE_SIDE_EFFECTS (parm)
4757 /* If -ffloat-store specified, don't put explicit
4758 float variables into registers. */
4759 || (flag_float_store
4760 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4762 /* We can't use nominal_mode, because it will have been set to
4763 Pmode above. We must use the actual mode of the parm. */
4764 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4765 mark_user_reg (parmreg);
4766 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4768 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4769 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4770 push_to_sequence (conversion_insns);
4771 emit_move_insn (tempreg, DECL_RTL (parm));
4772 SET_DECL_RTL (parm,
4773 convert_to_mode (GET_MODE (parmreg),
4774 tempreg,
4775 unsigned_p));
4776 emit_move_insn (parmreg, DECL_RTL (parm));
4777 conversion_insns = get_insns();
4778 did_conversion = 1;
4779 end_sequence ();
4781 else
4782 emit_move_insn (parmreg, DECL_RTL (parm));
4783 SET_DECL_RTL (parm, parmreg);
4784 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4785 now the parm. */
4786 stack_parm = 0;
4788 #ifdef FUNCTION_ARG_CALLEE_COPIES
4789 /* If we are passed an arg by reference and it is our responsibility
4790 to make a copy, do it now.
4791 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4792 original argument, so we must recreate them in the call to
4793 FUNCTION_ARG_CALLEE_COPIES. */
4794 /* ??? Later, add code to skip the copy when the argument isn't
4795 modified. */
4797 else if (passed_pointer
4798 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4799 TYPE_MODE (DECL_ARG_TYPE (parm)),
4800 DECL_ARG_TYPE (parm),
4801 named_arg)
4802 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4804 rtx copy;
4805 tree type = DECL_ARG_TYPE (parm);
4807 /* This sequence may involve a library call perhaps clobbering
4808 registers that haven't been copied to pseudos yet. */
4810 push_to_sequence (conversion_insns);
4812 if (!COMPLETE_TYPE_P (type)
4813 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4814 /* This is a variable sized object. */
4815 copy = gen_rtx_MEM (BLKmode,
4816 allocate_dynamic_stack_space
4817 (expr_size (parm), NULL_RTX,
4818 TYPE_ALIGN (type)));
4819 else
4820 copy = assign_stack_temp (TYPE_MODE (type),
4821 int_size_in_bytes (type), 1);
4822 set_mem_attributes (copy, parm, 1);
4824 store_expr (parm, copy, 0);
4825 emit_move_insn (parmreg, XEXP (copy, 0));
4826 conversion_insns = get_insns ();
4827 did_conversion = 1;
4828 end_sequence ();
4830 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4832 /* In any case, record the parm's desired stack location
4833 in case we later discover it must live in the stack.
4835 If it is a COMPLEX value, store the stack location for both
4836 halves. */
4838 if (GET_CODE (parmreg) == CONCAT)
4839 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4840 else
4841 regno = REGNO (parmreg);
4843 if (regno >= max_parm_reg)
4845 rtx *new;
4846 int old_max_parm_reg = max_parm_reg;
4848 /* It's slow to expand this one register at a time,
4849 but it's also rare and we need max_parm_reg to be
4850 precisely correct. */
4851 max_parm_reg = regno + 1;
4852 new = (rtx *) ggc_realloc (parm_reg_stack_loc,
4853 max_parm_reg * sizeof (rtx));
4854 memset ((char *) (new + old_max_parm_reg), 0,
4855 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4856 parm_reg_stack_loc = new;
4859 if (GET_CODE (parmreg) == CONCAT)
4861 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4863 regnor = REGNO (gen_realpart (submode, parmreg));
4864 regnoi = REGNO (gen_imagpart (submode, parmreg));
4866 if (stack_parm != 0)
4868 parm_reg_stack_loc[regnor]
4869 = gen_realpart (submode, stack_parm);
4870 parm_reg_stack_loc[regnoi]
4871 = gen_imagpart (submode, stack_parm);
4873 else
4875 parm_reg_stack_loc[regnor] = 0;
4876 parm_reg_stack_loc[regnoi] = 0;
4879 else
4880 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4882 /* Mark the register as eliminable if we did no conversion
4883 and it was copied from memory at a fixed offset,
4884 and the arg pointer was not copied to a pseudo-reg.
4885 If the arg pointer is a pseudo reg or the offset formed
4886 an invalid address, such memory-equivalences
4887 as we make here would screw up life analysis for it. */
4888 if (nominal_mode == passed_mode
4889 && ! did_conversion
4890 && stack_parm != 0
4891 && GET_CODE (stack_parm) == MEM
4892 && stack_offset.var == 0
4893 && reg_mentioned_p (virtual_incoming_args_rtx,
4894 XEXP (stack_parm, 0)))
4896 rtx linsn = get_last_insn ();
4897 rtx sinsn, set;
4899 /* Mark complex types separately. */
4900 if (GET_CODE (parmreg) == CONCAT)
4901 /* Scan backwards for the set of the real and
4902 imaginary parts. */
4903 for (sinsn = linsn; sinsn != 0;
4904 sinsn = prev_nonnote_insn (sinsn))
4906 set = single_set (sinsn);
4907 if (set != 0
4908 && SET_DEST (set) == regno_reg_rtx [regnoi])
4909 REG_NOTES (sinsn)
4910 = gen_rtx_EXPR_LIST (REG_EQUIV,
4911 parm_reg_stack_loc[regnoi],
4912 REG_NOTES (sinsn));
4913 else if (set != 0
4914 && SET_DEST (set) == regno_reg_rtx [regnor])
4915 REG_NOTES (sinsn)
4916 = gen_rtx_EXPR_LIST (REG_EQUIV,
4917 parm_reg_stack_loc[regnor],
4918 REG_NOTES (sinsn));
4920 else if ((set = single_set (linsn)) != 0
4921 && SET_DEST (set) == parmreg)
4922 REG_NOTES (linsn)
4923 = gen_rtx_EXPR_LIST (REG_EQUIV,
4924 stack_parm, REG_NOTES (linsn));
4927 /* For pointer data type, suggest pointer register. */
4928 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4929 mark_reg_pointer (parmreg,
4930 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4932 /* If something wants our address, try to use ADDRESSOF. */
4933 if (TREE_ADDRESSABLE (parm))
4935 /* If we end up putting something into the stack,
4936 fixup_var_refs_insns will need to make a pass over
4937 all the instructions. It looks through the pending
4938 sequences -- but it can't see the ones in the
4939 CONVERSION_INSNS, if they're not on the sequence
4940 stack. So, we go back to that sequence, just so that
4941 the fixups will happen. */
4942 push_to_sequence (conversion_insns);
4943 put_var_into_stack (parm);
4944 conversion_insns = get_insns ();
4945 end_sequence ();
4948 else
4950 /* Value must be stored in the stack slot STACK_PARM
4951 during function execution. */
4953 if (promoted_mode != nominal_mode)
4955 /* Conversion is required. */
4956 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4958 emit_move_insn (tempreg, validize_mem (entry_parm));
4960 push_to_sequence (conversion_insns);
4961 entry_parm = convert_to_mode (nominal_mode, tempreg,
4962 TREE_UNSIGNED (TREE_TYPE (parm)));
4963 if (stack_parm)
4964 /* ??? This may need a big-endian conversion on sparc64. */
4965 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
4967 conversion_insns = get_insns ();
4968 did_conversion = 1;
4969 end_sequence ();
4972 if (entry_parm != stack_parm)
4974 if (stack_parm == 0)
4976 stack_parm
4977 = assign_stack_local (GET_MODE (entry_parm),
4978 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4979 set_mem_attributes (stack_parm, parm, 1);
4982 if (promoted_mode != nominal_mode)
4984 push_to_sequence (conversion_insns);
4985 emit_move_insn (validize_mem (stack_parm),
4986 validize_mem (entry_parm));
4987 conversion_insns = get_insns ();
4988 end_sequence ();
4990 else
4991 emit_move_insn (validize_mem (stack_parm),
4992 validize_mem (entry_parm));
4995 SET_DECL_RTL (parm, stack_parm);
4998 /* If this "parameter" was the place where we are receiving the
4999 function's incoming structure pointer, set up the result. */
5000 if (parm == function_result_decl)
5002 tree result = DECL_RESULT (fndecl);
5003 rtx addr = DECL_RTL (parm);
5004 rtx x;
5006 #ifdef POINTERS_EXTEND_UNSIGNED
5007 if (GET_MODE (addr) != Pmode)
5008 addr = convert_memory_address (Pmode, addr);
5009 #endif
5011 x = gen_rtx_MEM (DECL_MODE (result), addr);
5012 set_mem_attributes (x, result, 1);
5013 SET_DECL_RTL (result, x);
5017 /* Output all parameter conversion instructions (possibly including calls)
5018 now that all parameters have been copied out of hard registers. */
5019 emit_insn (conversion_insns);
5021 last_parm_insn = get_last_insn ();
5023 current_function_args_size = stack_args_size.constant;
5025 /* Adjust function incoming argument size for alignment and
5026 minimum length. */
5028 #ifdef REG_PARM_STACK_SPACE
5029 #ifndef MAYBE_REG_PARM_STACK_SPACE
5030 current_function_args_size = MAX (current_function_args_size,
5031 REG_PARM_STACK_SPACE (fndecl));
5032 #endif
5033 #endif
5035 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5037 current_function_args_size
5038 = ((current_function_args_size + STACK_BYTES - 1)
5039 / STACK_BYTES) * STACK_BYTES;
5041 #ifdef ARGS_GROW_DOWNWARD
5042 current_function_arg_offset_rtx
5043 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5044 : expand_expr (size_diffop (stack_args_size.var,
5045 size_int (-stack_args_size.constant)),
5046 NULL_RTX, VOIDmode, 0));
5047 #else
5048 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5049 #endif
5051 /* See how many bytes, if any, of its args a function should try to pop
5052 on return. */
5054 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5055 current_function_args_size);
5057 /* For stdarg.h function, save info about
5058 regs and stack space used by the named args. */
5060 current_function_args_info = args_so_far;
5062 /* Set the rtx used for the function return value. Put this in its
5063 own variable so any optimizers that need this information don't have
5064 to include tree.h. Do this here so it gets done when an inlined
5065 function gets output. */
5067 current_function_return_rtx
5068 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5069 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5071 /* If scalar return value was computed in a pseudo-reg, or was a named
5072 return value that got dumped to the stack, copy that to the hard
5073 return register. */
5074 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5076 tree decl_result = DECL_RESULT (fndecl);
5077 rtx decl_rtl = DECL_RTL (decl_result);
5079 if (REG_P (decl_rtl)
5080 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5081 : DECL_REGISTER (decl_result))
5083 rtx real_decl_rtl;
5085 #ifdef FUNCTION_OUTGOING_VALUE
5086 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5087 fndecl);
5088 #else
5089 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5090 fndecl);
5091 #endif
5092 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5093 /* The delay slot scheduler assumes that current_function_return_rtx
5094 holds the hard register containing the return value, not a
5095 temporary pseudo. */
5096 current_function_return_rtx = real_decl_rtl;
5101 /* Indicate whether REGNO is an incoming argument to the current function
5102 that was promoted to a wider mode. If so, return the RTX for the
5103 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5104 that REGNO is promoted from and whether the promotion was signed or
5105 unsigned. */
5107 #ifdef PROMOTE_FUNCTION_ARGS
5110 promoted_input_arg (regno, pmode, punsignedp)
5111 unsigned int regno;
5112 enum machine_mode *pmode;
5113 int *punsignedp;
5115 tree arg;
5117 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5118 arg = TREE_CHAIN (arg))
5119 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5120 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5121 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5123 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5124 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5126 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5127 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5128 && mode != DECL_MODE (arg))
5130 *pmode = DECL_MODE (arg);
5131 *punsignedp = unsignedp;
5132 return DECL_INCOMING_RTL (arg);
5136 return 0;
5139 #endif
5141 /* Compute the size and offset from the start of the stacked arguments for a
5142 parm passed in mode PASSED_MODE and with type TYPE.
5144 INITIAL_OFFSET_PTR points to the current offset into the stacked
5145 arguments.
5147 The starting offset and size for this parm are returned in *OFFSET_PTR
5148 and *ARG_SIZE_PTR, respectively.
5150 IN_REGS is nonzero if the argument will be passed in registers. It will
5151 never be set if REG_PARM_STACK_SPACE is not defined.
5153 FNDECL is the function in which the argument was defined.
5155 There are two types of rounding that are done. The first, controlled by
5156 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5157 list to be aligned to the specific boundary (in bits). This rounding
5158 affects the initial and starting offsets, but not the argument size.
5160 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5161 optionally rounds the size of the parm to PARM_BOUNDARY. The
5162 initial offset is not affected by this rounding, while the size always
5163 is and the starting offset may be. */
5165 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5166 initial_offset_ptr is positive because locate_and_pad_parm's
5167 callers pass in the total size of args so far as
5168 initial_offset_ptr. arg_size_ptr is always positive. */
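/* A worked example on a hypothetical target (illustrative numbers): args
   grow upward, PARM_BOUNDARY is 32, and FUNCTION_ARG_BOUNDARY returns 64.
   With a running offset of 20 bytes and a 6-byte BLKmode parm whose
   FUNCTION_ARG_PADDING is not `none', the offset is first padded to 24
   (the next 8-byte boundary), *OFFSET_PTR becomes 24, and the size is
   rounded up to 8, so the caller's next offset is 32.  */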
5170 void
5171 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5172 initial_offset_ptr, offset_ptr, arg_size_ptr,
5173 alignment_pad)
5174 enum machine_mode passed_mode;
5175 tree type;
5176 int in_regs ATTRIBUTE_UNUSED;
5177 tree fndecl ATTRIBUTE_UNUSED;
5178 struct args_size *initial_offset_ptr;
5179 struct args_size *offset_ptr;
5180 struct args_size *arg_size_ptr;
5181 struct args_size *alignment_pad;
5184 tree sizetree
5185 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5186 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5187 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5188 #ifdef ARGS_GROW_DOWNWARD
5189 tree s2 = sizetree;
5190 #endif
5192 #ifdef REG_PARM_STACK_SPACE
5193 /* If we have found a stack parm before we reach the end of the
5194 area reserved for registers, skip that area. */
5195 if (! in_regs)
5197 int reg_parm_stack_space = 0;
5199 #ifdef MAYBE_REG_PARM_STACK_SPACE
5200 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5201 #else
5202 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5203 #endif
5204 if (reg_parm_stack_space > 0)
5206 if (initial_offset_ptr->var)
5208 initial_offset_ptr->var
5209 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5210 ssize_int (reg_parm_stack_space));
5211 initial_offset_ptr->constant = 0;
5213 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5214 initial_offset_ptr->constant = reg_parm_stack_space;
5217 #endif /* REG_PARM_STACK_SPACE */
5219 arg_size_ptr->var = 0;
5220 arg_size_ptr->constant = 0;
5221 alignment_pad->var = 0;
5222 alignment_pad->constant = 0;
5224 #ifdef ARGS_GROW_DOWNWARD
5225 if (initial_offset_ptr->var)
5227 offset_ptr->constant = 0;
5228 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5229 initial_offset_ptr->var);
5231 else
5233 offset_ptr->constant = -initial_offset_ptr->constant;
5234 offset_ptr->var = 0;
5237 if (where_pad != none
5238 && (!host_integerp (sizetree, 1)
5239 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5240 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
5241 SUB_PARM_SIZE (*offset_ptr, s2);
5243 if (!in_regs
5244 #ifdef REG_PARM_STACK_SPACE
5245 || REG_PARM_STACK_SPACE (fndecl) > 0
5246 #endif
5248 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5250 if (initial_offset_ptr->var)
5251 arg_size_ptr->var = size_binop (MINUS_EXPR,
5252 size_binop (MINUS_EXPR,
5253 ssize_int (0),
5254 initial_offset_ptr->var),
5255 offset_ptr->var);
5257 else
5258 arg_size_ptr->constant = (-initial_offset_ptr->constant
5259 - offset_ptr->constant);
5261 /* Pad_below needs the pre-rounded size to know how much to pad below.
5262 We only pad parameters which are not in registers as they have their
5263 padding done elsewhere. */
5264 if (where_pad == downward
5265 && !in_regs)
5266 pad_below (offset_ptr, passed_mode, sizetree);
5268 #else /* !ARGS_GROW_DOWNWARD */
5269 if (!in_regs
5270 #ifdef REG_PARM_STACK_SPACE
5271 || REG_PARM_STACK_SPACE (fndecl) > 0
5272 #endif
5274 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5275 *offset_ptr = *initial_offset_ptr;
5277 #ifdef PUSH_ROUNDING
5278 if (passed_mode != BLKmode)
5279 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5280 #endif
5282 /* Pad_below needs the pre-rounded size to know how much to pad below
5283 so this must be done before rounding up. */
5284 if (where_pad == downward
5285 /* However, BLKmode args passed in regs have their padding done elsewhere.
5286 The stack slot must be able to hold the entire register. */
5287 && !(in_regs && passed_mode == BLKmode))
5288 pad_below (offset_ptr, passed_mode, sizetree);
5290 if (where_pad != none
5291 && (!host_integerp (sizetree, 1)
5292 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5293 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5295 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5296 #endif /* ARGS_GROW_DOWNWARD */
5299 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5300 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
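/* For example, with a constant offset of 12 and a BOUNDARY of 64 bits
   (8 bytes), the offset becomes CEIL_ROUND (12, 8) == 16, or
   FLOOR_ROUND (12, 8) == 8 when ARGS_GROW_DOWNWARD; when BOUNDARY exceeds
   both PARM_BOUNDARY and STACK_BOUNDARY, the padding so introduced is
   recorded in *ALIGNMENT_PAD.  */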
5302 static void
5303 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5304 struct args_size *offset_ptr;
5305 int boundary;
5306 struct args_size *alignment_pad;
5308 tree save_var = NULL_TREE;
5309 HOST_WIDE_INT save_constant = 0;
5311 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5313 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5315 save_var = offset_ptr->var;
5316 save_constant = offset_ptr->constant;
5319 alignment_pad->var = NULL_TREE;
5320 alignment_pad->constant = 0;
5322 if (boundary > BITS_PER_UNIT)
5324 if (offset_ptr->var)
5326 offset_ptr->var =
5327 #ifdef ARGS_GROW_DOWNWARD
5328 round_down
5329 #else
5330 round_up
5331 #endif
5332 (ARGS_SIZE_TREE (*offset_ptr),
5333 boundary / BITS_PER_UNIT);
5334 offset_ptr->constant = 0; /*?*/
5335 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5336 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5337 save_var);
5339 else
5341 offset_ptr->constant =
5342 #ifdef ARGS_GROW_DOWNWARD
5343 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5344 #else
5345 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5346 #endif
5347 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5348 alignment_pad->constant = offset_ptr->constant - save_constant;
5353 static void
5354 pad_below (offset_ptr, passed_mode, sizetree)
5355 struct args_size *offset_ptr;
5356 enum machine_mode passed_mode;
5357 tree sizetree;
5359 if (passed_mode != BLKmode)
5361 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5362 offset_ptr->constant
5363 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5364 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5365 - GET_MODE_SIZE (passed_mode));
5367 else
5369 if (TREE_CODE (sizetree) != INTEGER_CST
5370 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5372 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5373 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5374 /* Add it in. */
5375 ADD_PARM_SIZE (*offset_ptr, s2);
5376 SUB_PARM_SIZE (*offset_ptr, sizetree);
5381 /* Walk the tree of blocks describing the binding levels within a function
5382 and warn about uninitialized variables.
5383 This is done after calling flow_analysis and before global_alloc
5384 clobbers the pseudo-regs to hard regs. */
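/* For example (an editorial illustration), compiling

       int f (int c) { int x; if (c) x = 1; return x; }

   with -Wuninitialized: flow analysis finds a path on which the pseudo
   holding `x' is read before being set, regno_uninitialized returns
   nonzero, and we issue "`x' might be used uninitialized in this
   function".  */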
5386 void
5387 uninitialized_vars_warning (block)
5388 tree block;
5390 tree decl, sub;
5391 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5393 if (warn_uninitialized
5394 && TREE_CODE (decl) == VAR_DECL
5395 /* These warnings are unreliable for aggregates
5396 because assigning the fields one by one can fail to convince
5397 flow.c that the entire aggregate was initialized.
5398 Unions are troublesome because members may be shorter. */
5399 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5400 && DECL_RTL (decl) != 0
5401 && GET_CODE (DECL_RTL (decl)) == REG
5402 /* Global optimizations can make it difficult to determine if a
5403 particular variable has been initialized. However, a VAR_DECL
5404 with a nonzero DECL_INITIAL had an initializer, so do not
5405 claim it is potentially uninitialized.
5407 We do not care about the actual value in DECL_INITIAL, so we do
5408 not worry that it may be a dangling pointer. */
5409 && DECL_INITIAL (decl) == NULL_TREE
5410 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5411 warning_with_decl (decl,
5412 "`%s' might be used uninitialized in this function");
5413 if (extra_warnings
5414 && TREE_CODE (decl) == VAR_DECL
5415 && DECL_RTL (decl) != 0
5416 && GET_CODE (DECL_RTL (decl)) == REG
5417 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5418 warning_with_decl (decl,
5419 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5421 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5422 uninitialized_vars_warning (sub);
5425 /* Do the appropriate part of uninitialized_vars_warning
5426 but for arguments instead of local variables. */
5428 void
5429 setjmp_args_warning ()
5431 tree decl;
5432 for (decl = DECL_ARGUMENTS (current_function_decl);
5433 decl; decl = TREE_CHAIN (decl))
5434 if (DECL_RTL (decl) != 0
5435 && GET_CODE (DECL_RTL (decl)) == REG
5436 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5437 warning_with_decl (decl,
5438 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5441 /* If this function calls setjmp, put all vars into the stack
5442 unless they were declared `register'. */
5444 void
5445 setjmp_protect (block)
5446 tree block;
5448 tree decl, sub;
5449 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5450 if ((TREE_CODE (decl) == VAR_DECL
5451 || TREE_CODE (decl) == PARM_DECL)
5452 && DECL_RTL (decl) != 0
5453 && (GET_CODE (DECL_RTL (decl)) == REG
5454 || (GET_CODE (DECL_RTL (decl)) == MEM
5455 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5456 /* If this variable came from an inline function, it must be
5457 that its life doesn't overlap the setjmp. If there was a
5458 setjmp in the function, it would already be in memory. We
5459 must exclude such variables because their DECL_RTL might be
5460 set to strange things such as virtual_stack_vars_rtx. */
5461 && ! DECL_FROM_INLINE (decl)
5462 && (
5463 #ifdef NON_SAVING_SETJMP
5464 /* If longjmp doesn't restore the registers,
5465 don't put anything in them. */
5466 NON_SAVING_SETJMP
5467 ||
5468 #endif
5469 ! DECL_REGISTER (decl)))
5470 put_var_into_stack (decl);
5471 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5472 setjmp_protect (sub);
5475 /* Like the previous function, but for args instead of local variables. */
5477 void
5478 setjmp_protect_args ()
5480 tree decl;
5481 for (decl = DECL_ARGUMENTS (current_function_decl);
5482 decl; decl = TREE_CHAIN (decl))
5483 if ((TREE_CODE (decl) == VAR_DECL
5484 || TREE_CODE (decl) == PARM_DECL)
5485 && DECL_RTL (decl) != 0
5486 && (GET_CODE (DECL_RTL (decl)) == REG
5487 || (GET_CODE (DECL_RTL (decl)) == MEM
5488 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5489 && (
5490 /* If longjmp doesn't restore the registers,
5491 don't put anything in them. */
5492 #ifdef NON_SAVING_SETJMP
5493 NON_SAVING_SETJMP
5494 ||
5495 #endif
5496 ! DECL_REGISTER (decl)))
5497 put_var_into_stack (decl);
5500 /* Return the context-pointer register corresponding to DECL,
5501 or 0 if it does not need one. */
5503 rtx
5504 lookup_static_chain (decl)
5505 tree decl;
5507 tree context = decl_function_context (decl);
5508 tree link;
5510 if (context == 0
5511 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5512 return 0;
5514 /* We treat inline_function_decl as an alias for the current function
5515 because that is the inline function whose vars, types, etc.
5516 are being merged into the current function.
5517 See expand_inline_function. */
5518 if (context == current_function_decl || context == inline_function_decl)
5519 return virtual_stack_vars_rtx;
5521 for (link = context_display; link; link = TREE_CHAIN (link))
5522 if (TREE_PURPOSE (link) == context)
5523 return RTL_EXPR_RTL (TREE_VALUE (link));
5525 abort ();
5528 /* Convert a stack slot address ADDR for variable VAR
5529 (from a containing function)
5530 into an address valid in this function (using a static chain). */
5532 rtx
5533 fix_lexical_addr (addr, var)
5534 rtx addr;
5535 tree var;
5537 rtx basereg;
5538 HOST_WIDE_INT displacement;
5539 tree context = decl_function_context (var);
5540 struct function *fp;
5541 rtx base = 0;
5543 /* If this is the present function, we need not do anything. */
5544 if (context == current_function_decl || context == inline_function_decl)
5545 return addr;
5547 fp = find_function_data (context);
5549 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5550 addr = XEXP (XEXP (addr, 0), 0);
5552 /* Decode given address as base reg plus displacement. */
5553 if (GET_CODE (addr) == REG)
5554 basereg = addr, displacement = 0;
5555 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5556 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5557 else
5558 abort ();
5560 /* We accept vars reached via the containing function's
5561 incoming arg pointer and via its stack variables pointer. */
5562 if (basereg == fp->internal_arg_pointer)
5564 /* If reached via arg pointer, get the arg pointer value
5565 out of that function's stack frame.
5567 There are two cases: If a separate ap is needed, allocate a
5568 slot in the outer function for it and dereference it that way.
5569 This is correct even if the real ap is actually a pseudo.
5570 Otherwise, just adjust the offset from the frame pointer to
5571 compensate. */
5573 #ifdef NEED_SEPARATE_AP
5574 rtx addr;
5576 addr = get_arg_pointer_save_area (fp);
5577 addr = fix_lexical_addr (XEXP (addr, 0), var);
5578 addr = memory_address (Pmode, addr);
5580 base = gen_rtx_MEM (Pmode, addr);
5581 set_mem_alias_set (base, get_frame_alias_set ());
5582 base = copy_to_reg (base);
5583 #else
5584 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5585 base = lookup_static_chain (var);
5586 #endif
5589 else if (basereg == virtual_stack_vars_rtx)
5591 /* This is the same code as lookup_static_chain, duplicated here to
5592 avoid an extra call to decl_function_context. */
5593 tree link;
5595 for (link = context_display; link; link = TREE_CHAIN (link))
5596 if (TREE_PURPOSE (link) == context)
5598 base = RTL_EXPR_RTL (TREE_VALUE (link));
5599 break;
5603 if (base == 0)
5604 abort ();
5606 /* Use same offset, relative to appropriate static chain or argument
5607 pointer. */
5608 return plus_constant (base, displacement);
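/* Editor's sketch (not from the original source): if ADDR is
   (plus (reg outer-frame) (const_int 8)), it decodes as BASEREG
   plus displacement 8; when BASEREG is the containing function's
   virtual_stack_vars_rtx, BASE becomes the static chain value
   recorded for that context and the result is (plus base 8),
   i.e. the same offset within the outer frame.  */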
5611 /* Return the address of the trampoline for entering nested fn FUNCTION.
5612 If necessary, allocate a trampoline (in the stack frame)
5613 and emit rtl to initialize its contents (at entry to this function). */
5615 rtx
5616 trampoline_address (function)
5617 tree function;
5619 tree link;
5620 tree rtlexp;
5621 rtx tramp;
5622 struct function *fp;
5623 tree fn_context;
5625 /* Find an existing trampoline and return it. */
5626 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5627 if (TREE_PURPOSE (link) == function)
5628 return
5629 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5631 for (fp = outer_function_chain; fp; fp = fp->outer)
5632 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5633 if (TREE_PURPOSE (link) == function)
5635 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5636 function);
5637 return adjust_trampoline_addr (tramp);
5640 /* None exists; we must make one. */
5642 /* Find the `struct function' for the function containing FUNCTION. */
5643 fp = 0;
5644 fn_context = decl_function_context (function);
5645 if (fn_context != current_function_decl
5646 && fn_context != inline_function_decl)
5647 fp = find_function_data (fn_context);
5649 /* Allocate run-time space for this trampoline
5650 (usually in the defining function's stack frame). */
5651 #ifdef ALLOCATE_TRAMPOLINE
5652 tramp = ALLOCATE_TRAMPOLINE (fp);
5653 #else
5654 /* If rounding needed, allocate extra space
5655 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5656 #define TRAMPOLINE_REAL_SIZE \
5657 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5658 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5659 fp ? fp : cfun);
5660 #endif
5662 /* Record the trampoline for reuse and note it for later initialization
5663 by expand_function_end. */
5664 if (fp != 0)
5666 rtlexp = make_node (RTL_EXPR);
5667 RTL_EXPR_RTL (rtlexp) = tramp;
5668 fp->x_trampoline_list = tree_cons (function, rtlexp,
5669 fp->x_trampoline_list);
5671 else
5673 /* Make the RTL_EXPR node temporary, not momentary, so that the
5674 trampoline_list doesn't become garbage. */
5675 rtlexp = make_node (RTL_EXPR);
5677 RTL_EXPR_RTL (rtlexp) = tramp;
5678 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5681 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5682 return adjust_trampoline_addr (tramp);
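/* Editor's worked example (assumed values, not from the original
   source): with TRAMPOLINE_SIZE == 10 and TRAMPOLINE_ALIGNMENT ==
   128 bits (16 bytes), TRAMPOLINE_REAL_SIZE is 10 + 16 - 1 == 25,
   so however the 25-byte slot is placed, rounding its address up
   to a 16-byte boundary wastes at most 15 bytes and still leaves
   the 10 bytes the trampoline needs.  */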
5685 /* Given a trampoline address,
5686 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5688 static rtx
5689 round_trampoline_addr (tramp)
5690 rtx tramp;
5692 /* Round address up to desired boundary. */
5693 rtx temp = gen_reg_rtx (Pmode);
5694 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5695 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5697 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5698 temp, 0, OPTAB_LIB_WIDEN);
5699 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5700 temp, 0, OPTAB_LIB_WIDEN);
5702 return tramp;
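/* Editor's arithmetic check (not from the original source): for a
   16-byte TRAMPOLINE_ALIGNMENT, ADDEND is 15 and MASK is -16, so
   e.g. 0x1003 becomes (0x1003 + 15) & -16 == 0x1010, the next
   aligned address at or above the original.  */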
5705 /* Given a trampoline address, round it then apply any
5706 platform-specific adjustments so that the result can be used for a
5707 function call. */
5709 static rtx
5710 adjust_trampoline_addr (tramp)
5711 rtx tramp;
5713 tramp = round_trampoline_addr (tramp);
5714 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5715 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5716 #endif
5717 return tramp;
5720 /* Put all this function's BLOCK nodes including those that are chained
5721 onto the first block into a vector, and return it.
5722 Also store in each NOTE for the beginning or end of a block
5723 the index of that block in the vector.
5724 The root BLOCK is taken from DECL_INITIAL (current_function_decl)
5725 and the insn chain from get_insns (). */
5727 void
5728 identify_blocks ()
5730 int n_blocks;
5731 tree *block_vector, *last_block_vector;
5732 tree *block_stack;
5733 tree block = DECL_INITIAL (current_function_decl);
5735 if (block == 0)
5736 return;
5738 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5739 depth-first order. */
5740 block_vector = get_block_vector (block, &n_blocks);
5741 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5743 last_block_vector = identify_blocks_1 (get_insns (),
5744 block_vector + 1,
5745 block_vector + n_blocks,
5746 block_stack);
5748 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5749 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5750 if (0 && last_block_vector != block_vector + n_blocks)
5751 abort ();
5753 free (block_vector);
5754 free (block_stack);
5757 /* Subroutine of identify_blocks. Do the block substitution on the
5758 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5760 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5761 BLOCK_VECTOR is incremented for each block seen. */
5763 static tree *
5764 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5765 rtx insns;
5766 tree *block_vector;
5767 tree *end_block_vector;
5768 tree *orig_block_stack;
5770 rtx insn;
5771 tree *block_stack = orig_block_stack;
5773 for (insn = insns; insn; insn = NEXT_INSN (insn))
5775 if (GET_CODE (insn) == NOTE)
5777 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5779 tree b;
5781 /* If there are more block notes than BLOCKs, something
5782 is badly wrong. */
5783 if (block_vector == end_block_vector)
5784 abort ();
5786 b = *block_vector++;
5787 NOTE_BLOCK (insn) = b;
5788 *block_stack++ = b;
5790 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5792 /* If there are more NOTE_INSN_BLOCK_ENDs than
5793 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5794 if (block_stack == orig_block_stack)
5795 abort ();
5797 NOTE_BLOCK (insn) = *--block_stack;
5800 else if (GET_CODE (insn) == CALL_INSN
5801 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5803 rtx cp = PATTERN (insn);
5805 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5806 end_block_vector, block_stack);
5807 if (XEXP (cp, 1))
5808 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5809 end_block_vector, block_stack);
5810 if (XEXP (cp, 2))
5811 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5812 end_block_vector, block_stack);
5816 /* If there are more NOTE_INSN_BLOCK_BEGs than NOTE_INSN_BLOCK_ENDs,
5817 something is badly wrong. */
5818 if (block_stack != orig_block_stack)
5819 abort ();
5821 return block_vector;
5824 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5825 and create duplicate blocks. */
5826 /* ??? Need an option to either create block fragments or to create
5827 abstract origin duplicates of a source block. It really depends
5828 on what optimization has been performed. */
5830 void
5831 reorder_blocks ()
5833 tree block = DECL_INITIAL (current_function_decl);
5834 varray_type block_stack;
5836 if (block == NULL_TREE)
5837 return;
5839 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5841 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5842 reorder_blocks_0 (block);
5844 /* Prune the old trees away, so that they don't get in the way. */
5845 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5846 BLOCK_CHAIN (block) = NULL_TREE;
5848 /* Recreate the block tree from the note nesting. */
5849 reorder_blocks_1 (get_insns (), block, &block_stack);
5850 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5852 /* Remove deleted blocks from the block fragment chains. */
5853 reorder_fix_fragments (block);
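/* Editor's sketch (not from the original source): if optimization
   duplicates a block's begin/end notes, the second occurrence of
   NOTE_INSN_BLOCK_BEG finds TREE_ASM_WRITTEN already set, so
   reorder_blocks_1 copies the block, points BLOCK_FRAGMENT_ORIGIN
   of the copy at the original, and links the copy into the
   original's BLOCK_FRAGMENT_CHAIN; reorder_fix_fragments then
   repairs the chains if the original block was deleted.  */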
5856 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5858 static void
5859 reorder_blocks_0 (block)
5860 tree block;
5862 while (block)
5864 TREE_ASM_WRITTEN (block) = 0;
5865 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5866 block = BLOCK_CHAIN (block);
5870 static void
5871 reorder_blocks_1 (insns, current_block, p_block_stack)
5872 rtx insns;
5873 tree current_block;
5874 varray_type *p_block_stack;
5876 rtx insn;
5878 for (insn = insns; insn; insn = NEXT_INSN (insn))
5880 if (GET_CODE (insn) == NOTE)
5882 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5884 tree block = NOTE_BLOCK (insn);
5886 /* If we have seen this block before, that means it now
5887 spans multiple address regions. Create a new fragment. */
5888 if (TREE_ASM_WRITTEN (block))
5890 tree new_block = copy_node (block);
5891 tree origin;
5893 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5894 ? BLOCK_FRAGMENT_ORIGIN (block)
5895 : block);
5896 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5897 BLOCK_FRAGMENT_CHAIN (new_block)
5898 = BLOCK_FRAGMENT_CHAIN (origin);
5899 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5901 NOTE_BLOCK (insn) = new_block;
5902 block = new_block;
5905 BLOCK_SUBBLOCKS (block) = 0;
5906 TREE_ASM_WRITTEN (block) = 1;
5907 BLOCK_SUPERCONTEXT (block) = current_block;
5908 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5909 BLOCK_SUBBLOCKS (current_block) = block;
5910 current_block = block;
5911 VARRAY_PUSH_TREE (*p_block_stack, block);
5913 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5915 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5916 VARRAY_POP (*p_block_stack);
5917 BLOCK_SUBBLOCKS (current_block)
5918 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5919 current_block = BLOCK_SUPERCONTEXT (current_block);
5922 else if (GET_CODE (insn) == CALL_INSN
5923 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5925 rtx cp = PATTERN (insn);
5926 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5927 if (XEXP (cp, 1))
5928 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5929 if (XEXP (cp, 2))
5930 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5935 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
5936 appears in the block tree, select one of the fragments to become
5937 the new origin block. */
5939 static void
5940 reorder_fix_fragments (block)
5941 tree block;
5943 while (block)
5945 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
5946 tree new_origin = NULL_TREE;
5948 if (dup_origin)
5950 if (! TREE_ASM_WRITTEN (dup_origin))
5952 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
5954 /* Find the first of the remaining fragments. There must
5955 be at least one -- the current block. */
5956 while (! TREE_ASM_WRITTEN (new_origin))
5957 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
5958 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
5961 else if (! dup_origin)
5962 new_origin = block;
5964 /* Re-root the rest of the fragments to the new origin. In the
5965 case that DUP_ORIGIN was null, that means BLOCK was the origin
5966 of a chain of fragments and we want to remove those fragments
5967 that didn't make it to the output. */
5968 if (new_origin)
5970 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
5971 tree chain = *pp;
5973 while (chain)
5975 if (TREE_ASM_WRITTEN (chain))
5977 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
5978 *pp = chain;
5979 pp = &BLOCK_FRAGMENT_CHAIN (chain);
5981 chain = BLOCK_FRAGMENT_CHAIN (chain);
5983 *pp = NULL_TREE;
5986 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
5987 block = BLOCK_CHAIN (block);
5991 /* Reverse the order of elements in the chain T of blocks,
5992 and return the new head of the chain (old last element). */
5994 static tree
5995 blocks_nreverse (t)
5996 tree t;
5998 tree prev = 0, decl, next;
5999 for (decl = t; decl; decl = next)
6001 next = BLOCK_CHAIN (decl);
6002 BLOCK_CHAIN (decl) = prev;
6003 prev = decl;
6005 return prev;
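/* Editor's example (not from the original source): given the chain
   B1 -> B2 -> B3, blocks_nreverse rewrites each BLOCK_CHAIN in
   place and returns B3 -> B2 -> B1.  */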
6008 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6009 non-NULL, list them all into VECTOR, in a depth-first preorder
6010 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6011 blocks. */
6013 static int
6014 all_blocks (block, vector)
6015 tree block;
6016 tree *vector;
6018 int n_blocks = 0;
6020 while (block)
6022 TREE_ASM_WRITTEN (block) = 0;
6024 /* Record this block. */
6025 if (vector)
6026 vector[n_blocks] = block;
6028 ++n_blocks;
6030 /* Record the subblocks, and their subblocks... */
6031 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6032 vector ? vector + n_blocks : 0);
6033 block = BLOCK_CHAIN (block);
6036 return n_blocks;
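/* Editor's example (not from the original source): for a block A
   with subblocks B and C, where B itself has subblock D, the
   depth-first preorder fills VECTOR with A, B, D, C.  */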
6039 /* Return a vector containing all the blocks rooted at BLOCK. The
6040 number of elements in the vector is stored in N_BLOCKS_P. The
6041 vector is dynamically allocated; it is the caller's responsibility
6042 to call `free' on the pointer returned. */
6044 static tree *
6045 get_block_vector (block, n_blocks_p)
6046 tree block;
6047 int *n_blocks_p;
6049 tree *block_vector;
6051 *n_blocks_p = all_blocks (block, NULL);
6052 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6053 all_blocks (block, block_vector);
6055 return block_vector;
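/* Editor's usage sketch (hypothetical fragment, not from the
   original source); the caller owns the returned vector.  */
#if 0
  int n_blocks;
  tree *vec = get_block_vector (block, &n_blocks);
  /* vec[0] is BLOCK itself; vec[1] .. vec[n_blocks - 1] are the
     subblocks in depth-first preorder.  */
  free (vec);
#endif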
6058 static int next_block_index = 2;
6060 /* Set BLOCK_NUMBER for all the blocks in FN. */
6062 void
6063 number_blocks (fn)
6064 tree fn;
6066 int i;
6067 int n_blocks;
6068 tree *block_vector;
6070 /* For SDB and XCOFF debugging output, we start numbering the blocks
6071 from 1 within each function, rather than keeping a running
6072 count. */
6073 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6074 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6075 next_block_index = 1;
6076 #endif
6078 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6080 /* The top-level BLOCK isn't numbered at all. */
6081 for (i = 1; i < n_blocks; ++i)
6082 /* Number the rest consecutively, starting at next_block_index. */
6083 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6085 free (block_vector);
6087 return;
6090 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6092 tree
6093 debug_find_var_in_block_tree (var, block)
6094 tree var;
6095 tree block;
6097 tree t;
6099 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6100 if (t == var)
6101 return block;
6103 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6105 tree ret = debug_find_var_in_block_tree (var, t);
6106 if (ret)
6107 return ret;
6110 return NULL_TREE;
6113 /* Allocate a function structure and reset its contents to the defaults. */
6115 static void
6116 prepare_function_start ()
6118 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6120 init_stmt_for_function ();
6121 init_eh_for_function ();
6123 cse_not_expected = ! optimize;
6125 /* Caller save not needed yet. */
6126 caller_save_needed = 0;
6128 /* No stack slots have been made yet. */
6129 stack_slot_list = 0;
6131 current_function_has_nonlocal_label = 0;
6132 current_function_has_nonlocal_goto = 0;
6134 /* There is no stack slot for handling nonlocal gotos. */
6135 nonlocal_goto_handler_slots = 0;
6136 nonlocal_goto_stack_level = 0;
6138 /* No labels have been declared for nonlocal use. */
6139 nonlocal_labels = 0;
6140 nonlocal_goto_handler_labels = 0;
6142 /* No function calls so far in this function. */
6143 function_call_count = 0;
6145 /* No parm regs have been allocated.
6146 (This is important for output_inline_function.) */
6147 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6149 /* Initialize the RTL mechanism. */
6150 init_emit ();
6152 /* Initialize the queue of pending postincrement and postdecrements,
6153 and some other info in expr.c. */
6154 init_expr ();
6156 /* We haven't done register allocation yet. */
6157 reg_renumber = 0;
6159 init_varasm_status (cfun);
6161 /* Clear out data used for inlining. */
6162 cfun->inlinable = 0;
6163 cfun->original_decl_initial = 0;
6164 cfun->original_arg_vector = 0;
6166 cfun->stack_alignment_needed = STACK_BOUNDARY;
6167 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6169 /* Set if a call to setjmp is seen. */
6170 current_function_calls_setjmp = 0;
6172 /* Set if a call to longjmp is seen. */
6173 current_function_calls_longjmp = 0;
6175 current_function_calls_alloca = 0;
6176 current_function_calls_eh_return = 0;
6177 current_function_calls_constant_p = 0;
6178 current_function_contains_functions = 0;
6179 current_function_is_leaf = 0;
6180 current_function_nothrow = 0;
6181 current_function_sp_is_unchanging = 0;
6182 current_function_uses_only_leaf_regs = 0;
6183 current_function_has_computed_jump = 0;
6184 current_function_is_thunk = 0;
6186 current_function_returns_pcc_struct = 0;
6187 current_function_returns_struct = 0;
6188 current_function_epilogue_delay_list = 0;
6189 current_function_uses_const_pool = 0;
6190 current_function_uses_pic_offset_table = 0;
6191 current_function_cannot_inline = 0;
6193 /* We have not yet needed to make a label to jump to for tail-recursion. */
6194 tail_recursion_label = 0;
6196 /* We haven't had a need to make a save area for ap yet. */
6197 arg_pointer_save_area = 0;
6199 /* No stack slots allocated yet. */
6200 frame_offset = 0;
6202 /* No SAVE_EXPRs in this function yet. */
6203 save_expr_regs = 0;
6205 /* No RTL_EXPRs in this function yet. */
6206 rtl_expr_chain = 0;
6208 /* Set up to allocate temporaries. */
6209 init_temp_slots ();
6211 /* Indicate that we need to distinguish between the return value of the
6212 present function and the return value of a function being called. */
6213 rtx_equal_function_value_matters = 1;
6215 /* Indicate that we have not instantiated virtual registers yet. */
6216 virtuals_instantiated = 0;
6218 /* Indicate that we want CONCATs now. */
6219 generating_concat_p = 1;
6221 /* Indicate we have no need of a frame pointer yet. */
6222 frame_pointer_needed = 0;
6224 /* By default assume not stdarg. */
6225 current_function_stdarg = 0;
6227 /* We haven't made any trampolines for this function yet. */
6228 trampoline_list = 0;
6230 init_pending_stack_adjust ();
6231 inhibit_defer_pop = 0;
6233 current_function_outgoing_args_size = 0;
6235 current_function_funcdef_no = funcdef_no++;
6237 cfun->arc_profile = profile_arc_flag || flag_test_coverage;
6239 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6241 cfun->max_jumptable_ents = 0;
6243 (*lang_hooks.function.init) (cfun);
6244 if (init_machine_status)
6245 cfun->machine = (*init_machine_status) ();
6248 /* Initialize the rtl expansion mechanism so that we can do simple things
6249 like generate sequences. This is used to provide a context during global
6250 initialization of some passes. */
6251 void
6252 init_dummy_function_start ()
6254 prepare_function_start ();
6257 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6258 and initialize static variables for generating RTL for the statements
6259 of the function. */
6261 void
6262 init_function_start (subr, filename, line)
6263 tree subr;
6264 const char *filename;
6265 int line;
6267 prepare_function_start ();
6269 current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
6270 cfun->decl = subr;
6272 /* Nonzero if this is a nested function that uses a static chain. */
6274 current_function_needs_context
6275 = (decl_function_context (current_function_decl) != 0
6276 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6278 /* Within function body, compute a type's size as soon as it is laid out. */
6279 immediate_size_expand++;
6281 /* Prevent ever trying to delete the first instruction of a function.
6282 Also tell final how to output a linenum before the function prologue.
6283 Note linenums could be missing, e.g. when compiling a Java .class file. */
6284 if (line > 0)
6285 emit_line_note (filename, line);
6287 /* Make sure first insn is a note even if we don't want linenums.
6288 This makes sure the first insn will never be deleted.
6289 Also, final expects a note to appear there. */
6290 emit_note (NULL, NOTE_INSN_DELETED);
6292 /* Set flags used by final.c. */
6293 if (aggregate_value_p (DECL_RESULT (subr)))
6295 #ifdef PCC_STATIC_STRUCT_RETURN
6296 current_function_returns_pcc_struct = 1;
6297 #endif
6298 current_function_returns_struct = 1;
6301 /* Warn if this value is an aggregate type,
6302 regardless of which calling convention we are using for it. */
6303 if (warn_aggregate_return
6304 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6305 warning ("function returns an aggregate");
6307 current_function_returns_pointer
6308 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6311 /* Make sure all values used by the optimization passes have sane
6312 defaults. */
6313 void
6314 init_function_for_compilation ()
6316 reg_renumber = 0;
6318 /* No prologue/epilogue insns yet. */
6319 VARRAY_GROW (prologue, 0);
6320 VARRAY_GROW (epilogue, 0);
6321 VARRAY_GROW (sibcall_epilogue, 0);
6324 /* Expand a call to __main at the beginning of a possible main function. */
6326 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6327 #undef HAS_INIT_SECTION
6328 #define HAS_INIT_SECTION
6329 #endif
6331 void
6332 expand_main_function ()
6334 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6335 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6337 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6338 rtx tmp, seq;
6340 start_sequence ();
6341 /* Forcibly align the stack. */
6342 #ifdef STACK_GROWS_DOWNWARD
6343 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
6344 stack_pointer_rtx, 1, OPTAB_WIDEN);
6345 #else
6346 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6347 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6348 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6349 stack_pointer_rtx, 1, OPTAB_WIDEN);
6350 #endif
6351 if (tmp != stack_pointer_rtx)
6352 emit_move_insn (stack_pointer_rtx, tmp);
6354 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6355 tmp = force_reg (Pmode, const0_rtx);
6356 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6357 seq = get_insns ();
6358 end_sequence ();
6360 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6361 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6362 break;
6363 if (tmp)
6364 emit_insn_before (seq, tmp);
6365 else
6366 emit_insn (seq);
6368 #endif
6370 #ifndef HAS_INIT_SECTION
6371 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6372 VOIDmode, 0);
6373 #endif
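/* Editor's sketch (assumed values, not from the original source):
   with PREFERRED_STACK_BOUNDARY == 128, ALIGN is 16; on a
   downward-growing stack the code above computes  sp &= -16,
   clearing the low four address bits, while on an upward-growing
   stack it computes  sp = (sp + 15) & -16  to round up instead.  */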
6376 /* The PENDING_SIZES represent the sizes of variable-sized types.
6377 Create RTL for the various sizes now (using temporary variables),
6378 so that we can refer to the sizes from the RTL we are generating
6379 for the current function. The PENDING_SIZES are a TREE_LIST. The
6380 TREE_VALUE of each node is a SAVE_EXPR. */
6382 void
6383 expand_pending_sizes (pending_sizes)
6384 tree pending_sizes;
6386 tree tem;
6388 /* Evaluate now the sizes of any types declared among the arguments. */
6389 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6391 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6392 /* Flush the queue in case this parameter declaration has
6393 side-effects. */
6394 emit_queue ();
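/* Editor's example (not from the original source): for a C99-style
   parameter list such as  void f (int n, char buf[n]);  the size
   of BUF's type is a SAVE_EXPR involving N; expanding it here once
   lets the RTL for the function body reuse the computed size.  */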
6398 /* Start the RTL for a new function, and set variables used for
6399 emitting RTL.
6400 SUBR is the FUNCTION_DECL node.
6401 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6402 the function's parameters, which must be run at any return statement. */
6404 void
6405 expand_function_start (subr, parms_have_cleanups)
6406 tree subr;
6407 int parms_have_cleanups;
6409 tree tem;
6410 rtx last_ptr = NULL_RTX;
6412 /* Make sure volatile mem refs aren't considered
6413 valid operands of arithmetic insns. */
6414 init_recog_no_volatile ();
6416 current_function_instrument_entry_exit
6417 = (flag_instrument_function_entry_exit
6418 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6420 current_function_profile
6421 = (profile_flag
6422 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6424 current_function_limit_stack
6425 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6427 /* If function gets a static chain arg, store it in the stack frame.
6428 Do this first, so it gets the first stack slot offset. */
6429 if (current_function_needs_context)
6431 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6433 /* Delay copying static chain if it is not a register to avoid
6434 conflicts with regs used for parameters. */
6435 if (! SMALL_REGISTER_CLASSES
6436 || GET_CODE (static_chain_incoming_rtx) == REG)
6437 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6440 /* If the parameters of this function need cleaning up, get a label
6441 for the beginning of the code which executes those cleanups. This must
6442 be done before doing anything with return_label. */
6443 if (parms_have_cleanups)
6444 cleanup_label = gen_label_rtx ();
6445 else
6446 cleanup_label = 0;
6448 /* Make the label for return statements to jump to. Do not special
6449 case machines with special return instructions -- they will be
6450 handled later during jump, ifcvt, or epilogue creation. */
6451 return_label = gen_label_rtx ();
6453 /* Initialize rtx used to return the value. */
6454 /* Do this before assign_parms so that we copy the struct value address
6455 before any library calls that assign parms might generate. */
6457 /* Decide whether to return the value in memory or in a register. */
6458 if (aggregate_value_p (DECL_RESULT (subr)))
6460 /* Returning something that won't go in a register. */
6461 rtx value_address = 0;
6463 #ifdef PCC_STATIC_STRUCT_RETURN
6464 if (current_function_returns_pcc_struct)
6466 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6467 value_address = assemble_static_space (size);
6469 else
6470 #endif
6472 /* Expect to be passed the address of a place to store the value.
6473 If it is passed as an argument, assign_parms will take care of
6474 it. */
6475 if (struct_value_incoming_rtx)
6477 value_address = gen_reg_rtx (Pmode);
6478 emit_move_insn (value_address, struct_value_incoming_rtx);
6481 if (value_address)
6483 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6484 set_mem_attributes (x, DECL_RESULT (subr), 1);
6485 SET_DECL_RTL (DECL_RESULT (subr), x);
6488 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6489 /* If return mode is void, this decl rtl should not be used. */
6490 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6491 else
6493 /* Compute the return values into a pseudo reg, which we will copy
6494 into the true return register after the cleanups are done. */
6496 /* In order to figure out what mode to use for the pseudo, we
6497 figure out what the mode of the eventual return register will
6498 actually be, and use that. */
6499 rtx hard_reg
6500 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6501 subr, 1);
6503 /* Structures that are returned in registers are not aggregate_value_p,
6504 so we may see a PARALLEL or a REG. */
6505 if (REG_P (hard_reg))
6506 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6507 else if (GET_CODE (hard_reg) == PARALLEL)
6508 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
6509 else
6510 abort ();
6512 /* Set DECL_REGISTER flag so that expand_function_end will copy the
6513 result to the real return register(s). */
6514 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6517 /* Initialize rtx for parameters and local variables.
6518 In some cases this requires emitting insns. */
6520 assign_parms (subr);
6522 /* Copy the static chain now if it wasn't a register. The delay is to
6523 avoid conflicts with the parameter passing registers. */
6525 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6526 if (GET_CODE (static_chain_incoming_rtx) != REG)
6527 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6529 /* The following was moved from init_function_start.
6530 The move is supposed to make sdb output more accurate. */
6531 /* Indicate the beginning of the function body,
6532 as opposed to parm setup. */
6533 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6535 if (GET_CODE (get_last_insn ()) != NOTE)
6536 emit_note (NULL, NOTE_INSN_DELETED);
6537 parm_birth_insn = get_last_insn ();
6539 context_display = 0;
6540 if (current_function_needs_context)
6542 /* Fetch static chain values for containing functions. */
6543 tem = decl_function_context (current_function_decl);
6544 /* Copy the static chain pointer into a pseudo. If we have
6545 small register classes, copy the value from memory if
6546 static_chain_incoming_rtx is a REG. */
6547 if (tem)
6549 /* If the static chain originally came in a register, put it back
6550 there, then move it out in the next insn. The reason for
6551 this peculiar code is to satisfy function integration. */
6552 if (SMALL_REGISTER_CLASSES
6553 && GET_CODE (static_chain_incoming_rtx) == REG)
6554 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6555 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6558 while (tem)
6560 tree rtlexp = make_node (RTL_EXPR);
6562 RTL_EXPR_RTL (rtlexp) = last_ptr;
6563 context_display = tree_cons (tem, rtlexp, context_display);
6564 tem = decl_function_context (tem);
6565 if (tem == 0)
6566 break;
6567 /* Chain thru stack frames, assuming pointer to next lexical frame
6568 is found at the place we always store it. */
6569 #ifdef FRAME_GROWS_DOWNWARD
6570 last_ptr = plus_constant (last_ptr,
6571 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6572 #endif
6573 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6574 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6575 last_ptr = copy_to_reg (last_ptr);
6577 /* If we are not optimizing, ensure that we know that this
6578 piece of context is live over the entire function. */
6579 if (! optimize)
6580 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6581 save_expr_regs);
6585 if (current_function_instrument_entry_exit)
6587 rtx fun = DECL_RTL (current_function_decl);
6588 if (GET_CODE (fun) == MEM)
6589 fun = XEXP (fun, 0);
6590 else
6591 abort ();
6592 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6593 2, fun, Pmode,
6594 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6596 hard_frame_pointer_rtx),
6597 Pmode);
6600 if (current_function_profile)
6602 #ifdef PROFILE_HOOK
6603 PROFILE_HOOK (current_function_funcdef_no);
6604 #endif
6607 /* After the display initializations is where the tail-recursion label
6608 should go, if we end up needing one. Ensure we have a NOTE here
6609 since some things (like trampolines) get placed before this. */
6610 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6612 /* Evaluate now the sizes of any types declared among the arguments. */
6613 expand_pending_sizes (nreverse (get_pending_sizes ()));
6615 /* Make sure there is a line number after the function entry setup code. */
6616 force_next_line_note ();
6619 /* Undo the effects of init_dummy_function_start. */
6620 void
6621 expand_dummy_function_end ()
6623 /* End any sequences that failed to be closed due to syntax errors. */
6624 while (in_sequence_p ())
6625 end_sequence ();
6627 /* Outside function body, can't compute type's actual size
6628 until next function's body starts. */
6630 free_after_parsing (cfun);
6631 free_after_compilation (cfun);
6632 cfun = 0;
6635 /* Call DOIT for each hard register used as a return value from
6636 the current function. */
6638 void
6639 diddle_return_value (doit, arg)
6640 void (*doit) PARAMS ((rtx, void *));
6641 void *arg;
6643 rtx outgoing = current_function_return_rtx;
6645 if (! outgoing)
6646 return;
6648 if (GET_CODE (outgoing) == REG)
6649 (*doit) (outgoing, arg);
6650 else if (GET_CODE (outgoing) == PARALLEL)
6652 int i;
6654 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6656 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6658 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6659 (*doit) (x, arg);
6664 static void
6665 do_clobber_return_reg (reg, arg)
6666 rtx reg;
6667 void *arg ATTRIBUTE_UNUSED;
6669 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6672 void
6673 clobber_return_register ()
6675 diddle_return_value (do_clobber_return_reg, NULL);
6677 /* In case we do use pseudo to return value, clobber it too. */
6678 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6680 tree decl_result = DECL_RESULT (current_function_decl);
6681 rtx decl_rtl = DECL_RTL (decl_result);
6682 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6684 do_clobber_return_reg (decl_rtl, NULL);
6689 static void
6690 do_use_return_reg (reg, arg)
6691 rtx reg;
6692 void *arg ATTRIBUTE_UNUSED;
6694 emit_insn (gen_rtx_USE (VOIDmode, reg));
6697 void
6698 use_return_register ()
6700 diddle_return_value (do_use_return_reg, NULL);
6703 static GTY(()) rtx initial_trampoline;
6705 /* Generate RTL for the end of the current function.
6706 FILENAME and LINE are the current position in the source file.
6708 It is up to language-specific callers to do cleanups for parameters--
6709 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6711 void
6712 expand_function_end (filename, line, end_bindings)
6713 const char *filename;
6714 int line;
6715 int end_bindings;
6717 tree link;
6718 rtx clobber_after;
6720 finish_expr_for_function ();
6722 /* If arg_pointer_save_area was referenced only from a nested
6723 function, we will not have initialized it yet. Do that now. */
6724 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6725 get_arg_pointer_save_area (cfun);
6727 #ifdef NON_SAVING_SETJMP
6728 /* Don't put any variables in registers if we call setjmp
6729 on a machine that fails to restore the registers. */
6730 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6732 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6733 setjmp_protect (DECL_INITIAL (current_function_decl));
6735 setjmp_protect_args ();
6737 #endif
6739 /* Initialize any trampolines required by this function. */
6740 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6742 tree function = TREE_PURPOSE (link);
6743 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6744 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6745 #ifdef TRAMPOLINE_TEMPLATE
6746 rtx blktramp;
6747 #endif
6748 rtx seq;
6750 #ifdef TRAMPOLINE_TEMPLATE
6751 /* First make sure this compilation has a template for
6752 initializing trampolines. */
6753 if (initial_trampoline == 0)
6755 initial_trampoline
6756 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6757 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6759 #endif
6761 /* Generate insns to initialize the trampoline. */
6762 start_sequence ();
6763 tramp = round_trampoline_addr (XEXP (tramp, 0));
6764 #ifdef TRAMPOLINE_TEMPLATE
6765 blktramp = replace_equiv_address (initial_trampoline, tramp);
6766 emit_block_move (blktramp, initial_trampoline,
6767 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
6768 #endif
6769 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6770 seq = get_insns ();
6771 end_sequence ();
6773 /* Put those insns at entry to the containing function (this one). */
6774 emit_insn_before (seq, tail_recursion_reentry);
6777 /* If we are doing stack checking and this function makes calls,
6778 do a stack probe at the start of the function to ensure we have enough
6779 space for another stack frame. */
6780 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6782 rtx insn, seq;
6784 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6785 if (GET_CODE (insn) == CALL_INSN)
6787 start_sequence ();
6788 probe_stack_range (STACK_CHECK_PROTECT,
6789 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6790 seq = get_insns ();
6791 end_sequence ();
6792 emit_insn_before (seq, tail_recursion_reentry);
6793 break;
6797 /* Warn about unused parms if extra warnings were specified. */
6798 /* Either ``-Wextra -Wunused'' or ``-Wunused-parameter'' enables this
6799 warning. WARN_UNUSED_PARAMETER is negative when set by
6800 -Wunused. Note that -Wall implies -Wunused, so ``-Wall -Wextra'' will
6801 also give these warnings. */
6802 if (warn_unused_parameter > 0
6803 || (warn_unused_parameter < 0 && extra_warnings))
6805 tree decl;
6807 for (decl = DECL_ARGUMENTS (current_function_decl);
6808 decl; decl = TREE_CHAIN (decl))
6809 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6810 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6811 warning_with_decl (decl, "unused parameter `%s'");
6814 /* Delete handlers for nonlocal gotos if nothing uses them. */
6815 if (nonlocal_goto_handler_slots != 0
6816 && ! current_function_has_nonlocal_label)
6817 delete_handlers ();
6819 /* End any sequences that failed to be closed due to syntax errors. */
6820 while (in_sequence_p ())
6821 end_sequence ();
6823 /* Outside function body, can't compute type's actual size
6824 until next function's body starts. */
6825 immediate_size_expand--;
6827 clear_pending_stack_adjust ();
6828 do_pending_stack_adjust ();
6830 /* Mark the end of the function body.
6831 If control reaches this insn, the function can drop through
6832 without returning a value. */
6833 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6835 /* Must mark the last line number note in the function, so that the test
6836 coverage code can avoid counting the last line twice. This just tells
6837 the code to ignore the immediately following line note, since there
6838 already exists a copy of this note somewhere above. This line number
6839 note is still needed for debugging though, so we can't delete it. */
6840 if (flag_test_coverage)
6841 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6843 /* Output a linenumber for the end of the function.
6844 SDB depends on this. */
6845 emit_line_note_force (filename, line);
6847 /* Before the return label (if any), clobber the return
6848 registers so that they are not propagated live to the rest of
6849 the function. This can only happen with functions that drop
6850 through; if there had been a return statement, there would
6851 have either been a return rtx, or a jump to the return label.
6853 We delay actual code generation until after current_function_return_rtx
6854 is computed. */
6855 clobber_after = get_last_insn ();
6857 /* Output the label for the actual return from the function,
6858 if one is expected. This happens either because a function epilogue
6859 is used instead of a return instruction, or because a return was done
6860 with a goto in order to run local cleanups, or because of pcc-style
6861 structure returning. */
6862 if (return_label)
6863 emit_label (return_label);
6865 /* C++ uses this. */
6866 if (end_bindings)
6867 expand_end_bindings (0, 0, 0);
6869 if (current_function_instrument_entry_exit)
6871 rtx fun = DECL_RTL (current_function_decl);
6872 if (GET_CODE (fun) == MEM)
6873 fun = XEXP (fun, 0);
6874 else
6875 abort ();
6876 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6877 2, fun, Pmode,
6878 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6880 hard_frame_pointer_rtx),
6881 Pmode);
6884 /* Let except.c know where it should emit the call to unregister
6885 the function context for sjlj exceptions. */
6886 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6887 sjlj_emit_function_exit_after (get_last_insn ());
6889 /* If we had calls to alloca, and this machine needs
6890 an accurate stack pointer to exit the function,
6891 insert some code to save and restore the stack pointer. */
6892 #ifdef EXIT_IGNORE_STACK
6893 if (! EXIT_IGNORE_STACK)
6894 #endif
6895 if (current_function_calls_alloca)
6897 rtx tem = 0;
6899 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6900 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6903 /* If scalar return value was computed in a pseudo-reg, or was a named
6904 return value that got dumped to the stack, copy that to the hard
6905 return register. */
6906 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6908 tree decl_result = DECL_RESULT (current_function_decl);
6909 rtx decl_rtl = DECL_RTL (decl_result);
6911 if (REG_P (decl_rtl)
6912 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6913 : DECL_REGISTER (decl_result))
6915 rtx real_decl_rtl = current_function_return_rtx;
6917 /* This should be set in assign_parms. */
6918 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
6919 abort ();
6921 /* If this is a BLKmode structure being returned in registers,
6922 then use the mode computed in expand_return. Note that if
6923 decl_rtl is memory, then its mode may have been changed,
6924 but that current_function_return_rtx has not. */
6925 if (GET_MODE (real_decl_rtl) == BLKmode)
6926 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
6928 /* If a named return value dumped decl_return to memory, then
6929 we may need to re-do the PROMOTE_MODE signed/unsigned
6930 extension. */
6931 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6933 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6935 #ifdef PROMOTE_FUNCTION_RETURN
6936 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6937 &unsignedp, 1);
6938 #endif
6940 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6942 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6944 /* If expand_function_start has created a PARALLEL for decl_rtl,
6945 move the result to the real return registers. Otherwise, do
6946 a group load from decl_rtl for a named return. */
6947 if (GET_CODE (decl_rtl) == PARALLEL)
6948 emit_group_move (real_decl_rtl, decl_rtl);
6949 else
6950 emit_group_load (real_decl_rtl, decl_rtl,
6951 int_size_in_bytes (TREE_TYPE (decl_result)));
6953 else
6954 emit_move_insn (real_decl_rtl, decl_rtl);
6958 /* If returning a structure, arrange to return the address of the value
6959 in a place where debuggers expect to find it.
6961 If returning a structure PCC style,
6962 the caller also depends on this value.
6963 And current_function_returns_pcc_struct is not necessarily set. */
6964 if (current_function_returns_struct
6965 || current_function_returns_pcc_struct)
6967 rtx value_address
6968 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6969 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6970 #ifdef FUNCTION_OUTGOING_VALUE
6971 rtx outgoing
6972 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6973 current_function_decl);
6974 #else
6975 rtx outgoing
6976 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
6977 #endif
6979 /* Mark this as a function return value so integrate will delete the
6980 assignment and USE below when inlining this function. */
6981 REG_FUNCTION_VALUE_P (outgoing) = 1;
6983 #ifdef POINTERS_EXTEND_UNSIGNED
6984 /* The address may be ptr_mode and OUTGOING may be Pmode. */
6985 if (GET_MODE (outgoing) != GET_MODE (value_address))
6986 value_address = convert_memory_address (GET_MODE (outgoing),
6987 value_address);
6988 #endif
6990 emit_move_insn (outgoing, value_address);
6992 /* Show return register used to hold result (in this case the address
6993 of the result). */
6994 current_function_return_rtx = outgoing;
6997 /* If this is an implementation of throw, do what's necessary to
6998 communicate between __builtin_eh_return and the epilogue. */
6999 expand_eh_return ();
7001 /* Emit the actual code to clobber return register. */
7003 rtx seq, after;
7005 start_sequence ();
7006 clobber_return_register ();
7007 seq = get_insns ();
7008 end_sequence ();
7010 after = emit_insn_after (seq, clobber_after);
7012 if (clobber_after != after)
7013 cfun->x_clobber_return_insn = after;
7016 /* ??? This should no longer be necessary since stupid is no longer with
7017 us, but there are some parts of the compiler (e.g. reload_combine, and
7018 sh mach_dep_reorg) that still try to compute their own lifetime info
7019 instead of using the general framework. */
7020 use_return_register ();
7022 /* Fix up any gotos that jumped out to the outermost
7023 binding level of the function.
7024 Must follow emitting RETURN_LABEL. */
7026 /* If you have any cleanups to do at this point,
7027 and they need to create temporary variables,
7028 then you will lose. */
7029 expand_fixups (get_insns ());
7032 rtx
7033 get_arg_pointer_save_area (f)
7034 struct function *f;
7036 rtx ret = f->x_arg_pointer_save_area;
7038 if (! ret)
7040 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7041 f->x_arg_pointer_save_area = ret;
7044 if (f == cfun && ! f->arg_pointer_save_area_init)
7046 rtx seq;
7048 /* Save the arg pointer at the beginning of the function. The
7049 generated stack slot may not be a valid memory address, so we
7050 have to check it and fix it if necessary. */
7051 start_sequence ();
7052 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7053 seq = get_insns ();
7054 end_sequence ();
7056 push_topmost_sequence ();
7057 emit_insn_after (seq, get_insns ());
7058 pop_topmost_sequence ();
7061 return ret;
7064 /* Extend a vector that records the INSN_UIDs of INSNS
7065 (a list of one or more insns). */
7067 static void
7068 record_insns (insns, vecp)
7069 rtx insns;
7070 varray_type *vecp;
7072 int i, len;
7073 rtx tmp;
7075 tmp = insns;
7076 len = 0;
7077 while (tmp != NULL_RTX)
7079 len++;
7080 tmp = NEXT_INSN (tmp);
7083 i = VARRAY_SIZE (*vecp);
7084 VARRAY_GROW (*vecp, i + len);
7085 tmp = insns;
7086 while (tmp != NULL_RTX)
7088 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
7089 i++;
7090 tmp = NEXT_INSN (tmp);
7094 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
7095 be running after reorg, SEQUENCE rtl is possible. */
7097 static int
7098 contains (insn, vec)
7099 rtx insn;
7100 varray_type vec;
7102 int i, j;
7104 if (GET_CODE (insn) == INSN
7105 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7107 int count = 0;
7108 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7109 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7110 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7111 count++;
7112 return count;
7114 else
7116 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7117 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7118 return 1;
7120 return 0;
7123 int
7124 prologue_epilogue_contains (insn)
7125 rtx insn;
7127 if (contains (insn, prologue))
7128 return 1;
7129 if (contains (insn, epilogue))
7130 return 1;
7131 return 0;
7134 int
7135 sibcall_epilogue_contains (insn)
7136 rtx insn;
7138 if (sibcall_epilogue)
7139 return contains (insn, sibcall_epilogue);
7140 return 0;
7143 #ifdef HAVE_return
7144 /* Insert gen_return at the end of block BB. This also means updating
7145 block_for_insn appropriately. */
7147 static void
7148 emit_return_into_block (bb, line_note)
7149 basic_block bb;
7150 rtx line_note;
7152 emit_jump_insn_after (gen_return (), bb->end);
7153 if (line_note)
7154 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7155 NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7157 #endif /* HAVE_return */
7159 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7161 /* These functions convert the epilogue into a variant that does not modify the
7162 stack pointer. This is used in cases where a function returns an object
7163 whose size is not known until it is computed. The called function leaves the
7164 object on the stack, leaves the stack depressed, and returns a pointer to
7165 the object.
7167 What we need to do is track all modifications and references to the stack
7168 pointer, deleting the modifications and changing the references to point to
7169 the location the stack pointer would have pointed to had the modifications
7170 taken place.
7172 These functions need to be portable so we need to make as few assumptions
7173 about the epilogue as we can. However, the epilogue basically contains
7174 three things: instructions to reset the stack pointer, instructions to
7175 reload registers, possibly including the frame pointer, and an
7176 instruction to return to the caller.
7178 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7179 We also make no attempt to validate the insns we make since if they are
7180 invalid, we probably can't do anything valid. The intent is that these
7181 routines get "smarter" as more and more machines start to use them and
7182 they try operating on different epilogues.
7184 We use the following structure to track what the part of the epilogue that
7185 we've already processed has done. We keep two copies of the SP equivalence,
7186 one for use during the insn we are processing and one for use in the next
7187 insn. The difference is because one part of a PARALLEL may adjust SP
7188 and the other may use it. */
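/* Editor's sketch (not from the original source): after processing
   an epilogue insn such as  (set sp (plus fp 16)),  SP_EQUIV_REG is
   the frame pointer and SP_OFFSET is 16, so a later reference like
   (mem (plus sp 4)) can be rewritten in terms of  fp + 16 + 4  even
   though the SET of SP itself is deleted.  */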
7190 struct epi_info
7192 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7193 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7194 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7195 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7196 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7197 should be set to once we no longer need
7198 its value. */
7201 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7202 static void emit_equiv_load PARAMS ((struct epi_info *));
7204 /* Modify INSN, a list of one or more insns that is part of the epilogue,
7205 so that it makes no modifications to the stack pointer. Return the new list of insns. */
7207 static rtx
7208 keep_stack_depressed (insns)
7209 rtx insns;
7211 int j;
7212 struct epi_info info;
7213 rtx insn, next;
7215 /* If the epilogue is just a single instruction, it must be OK as is. */
7217 if (NEXT_INSN (insns) == NULL_RTX)
7218 return insns;
7220 /* Otherwise, start a sequence, initialize the information we have, and
7221 process all the insns we were given. */
7222 start_sequence ();
7224 info.sp_equiv_reg = stack_pointer_rtx;
7225 info.sp_offset = 0;
7226 info.equiv_reg_src = 0;
7228 insn = insns;
7229 next = NULL_RTX;
7230 while (insn != NULL_RTX)
7232 next = NEXT_INSN (insn);
7234 if (!INSN_P (insn))
7236 add_insn (insn);
7237 insn = next;
7238 continue;
7241 /* If this insn references the register that SP is equivalent to and
7242 we have a pending load to that register, we must force out the load
7243 first and then indicate we no longer know what SP's equivalent is. */
7244 if (info.equiv_reg_src != 0
7245 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7247 emit_equiv_load (&info);
7248 info.sp_equiv_reg = 0;
7251 info.new_sp_equiv_reg = info.sp_equiv_reg;
7252 info.new_sp_offset = info.sp_offset;
7254 /* If this is a (RETURN) and the return address is on the stack,
7255 update the address and change to an indirect jump. */
7256 if (GET_CODE (PATTERN (insn)) == RETURN
7257 || (GET_CODE (PATTERN (insn)) == PARALLEL
7258 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7260 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7261 rtx base = 0;
7262 HOST_WIDE_INT offset = 0;
7263 rtx jump_insn, jump_set;
7265 /* If the return address is in a register, we can emit the insn
7266 unchanged. Otherwise, it must be a MEM and we see what the
7267 base register and offset are. In any case, we have to emit any
7268 pending load to the equivalent reg of SP, if any. */
7269 if (GET_CODE (retaddr) == REG)
7271 emit_equiv_load (&info);
7272 add_insn (insn);
7273 insn = next;
7274 continue;
7276 else if (GET_CODE (retaddr) == MEM
7277 && GET_CODE (XEXP (retaddr, 0)) == REG)
7278 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7279 else if (GET_CODE (retaddr) == MEM
7280 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7281 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7282 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7284 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7285 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7287 else
7288 abort ();
7290 /* If the base of the location containing the return pointer
7291 is SP, we must update it with the replacement address. Otherwise,
7292 just build the necessary MEM. */
7293 retaddr = plus_constant (base, offset);
7294 if (base == stack_pointer_rtx)
7295 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7296 plus_constant (info.sp_equiv_reg,
7297 info.sp_offset));
7299 retaddr = gen_rtx_MEM (Pmode, retaddr);
7301 /* If there is a pending load to the equivalent register for SP
7302 and we reference that register, we must load our address into
7303 a scratch register and then do that load. */
7304 if (info.equiv_reg_src
7305 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7307 unsigned int regno;
7308 rtx reg;
7310 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7311 if (HARD_REGNO_MODE_OK (regno, Pmode)
7312 && !fixed_regs[regno]
7313 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7314 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7315 regno)
7316 && !refers_to_regno_p (regno,
7317 regno + HARD_REGNO_NREGS (regno,
7318 Pmode),
7319 info.equiv_reg_src, NULL))
7320 break;
7322 if (regno == FIRST_PSEUDO_REGISTER)
7323 abort ();
7325 reg = gen_rtx_REG (Pmode, regno);
7326 emit_move_insn (reg, retaddr);
7327 retaddr = reg;
7330 emit_equiv_load (&info);
7331 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7333 /* Mark the SET in the above insn as a RETURN. */
7334 jump_set = single_set (jump_insn);
7335 if (jump_set == 0)
7336 abort ();
7337 else
7338 SET_IS_RETURN_P (jump_set) = 1;
7341 /* If SP is not mentioned in the pattern and its equivalent register, if
7342 any, is not modified, just emit the insn.  Otherwise, if neither SP nor
7343 its equivalent register is set, replace the references to SP and emit
7344 the insn.  If none of those hold, handle each SET individually. */
7345 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7346 && (info.sp_equiv_reg == stack_pointer_rtx
7347 || !reg_set_p (info.sp_equiv_reg, insn)))
7348 add_insn (insn);
7349 else if (! reg_set_p (stack_pointer_rtx, insn)
7350 && (info.sp_equiv_reg == stack_pointer_rtx
7351 || !reg_set_p (info.sp_equiv_reg, insn)))
7353 if (! validate_replace_rtx (stack_pointer_rtx,
7354 plus_constant (info.sp_equiv_reg,
7355 info.sp_offset),
7356 insn))
7357 abort ();
7359 add_insn (insn);
7361 else if (GET_CODE (PATTERN (insn)) == SET)
7362 handle_epilogue_set (PATTERN (insn), &info);
7363 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7365 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7366 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7367 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7369 else
7370 add_insn (insn);
7372 info.sp_equiv_reg = info.new_sp_equiv_reg;
7373 info.sp_offset = info.new_sp_offset;
7375 insn = next;
7378 insns = get_insns ();
7379 end_sequence ();
7380 return insns;
7383 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
7384 structure that contains information about what we've seen so far. We
7385 process this SET by either updating that data or by emitting one or
7386 more insns. */
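/* As a rough example: (set (reg sp) (plus (reg sp) (const_int 16)))
   merely adds 16 to the recorded offset and emits nothing, while a SET
   of an ordinary register has any SP references rewritten in terms of
   the current equivalent before the insn is emitted.  */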
7388 static void
7389 handle_epilogue_set (set, p)
7390 rtx set;
7391 struct epi_info *p;
7393 /* First handle the case where we are setting SP. Record what it is being
7394 set from. If unknown, abort. */
7395 if (reg_set_p (stack_pointer_rtx, set))
7397 if (SET_DEST (set) != stack_pointer_rtx)
7398 abort ();
7400 if (GET_CODE (SET_SRC (set)) == PLUS
7401 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
7403 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
7404 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
7406 else
7407 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
7409 /* If we are adjusting SP, we adjust from the old data. */
7410 if (p->new_sp_equiv_reg == stack_pointer_rtx)
7412 p->new_sp_equiv_reg = p->sp_equiv_reg;
7413 p->new_sp_offset += p->sp_offset;
7416 if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
7417 abort ();
7419 return;
7422 /* Next handle the case where we are setting SP's equivalent register.
7423 If we already have a value to set it to, abort.  We could update, but
7424 there seems little point in handling that case.  Note that we have
7425 to allow for the case where we are setting the register that was set
7426 by an earlier part of a PARALLEL within this same insn.  But use the
7427 old offset for any updates within this insn. */
7428 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
7430 if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
7431 || p->equiv_reg_src != 0)
7432 abort ();
7433 else
7434 p->equiv_reg_src
7435 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7436 plus_constant (p->sp_equiv_reg,
7437 p->sp_offset));
7440 /* Otherwise, replace any references to SP in the insn with its new value
7441 and emit the insn. */
7442 else
7444 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7445 plus_constant (p->sp_equiv_reg,
7446 p->sp_offset));
7447 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
7448 plus_constant (p->sp_equiv_reg,
7449 p->sp_offset));
7450 emit_insn (set);
7454 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
7456 static void
7457 emit_equiv_load (p)
7458 struct epi_info *p;
7460 if (p->equiv_reg_src != 0)
7461 emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
7463 p->equiv_reg_src = 0;
7465 #endif
7467 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7468 this into place with notes indicating where the prologue ends and where
7469 the epilogue begins. Update the basic block information when possible. */
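/* In rough outline, the result is

	prologue insns ... NOTE_INSN_PROLOGUE_END
	... function body ...
	NOTE_INSN_EPILOGUE_BEG ... epilogue insns

   with the prologue inserted on the lone successor edge of the entry
   block and the epilogue on the edge that falls through to EXIT.  */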
7471 void
7472 thread_prologue_and_epilogue_insns (f)
7473 rtx f ATTRIBUTE_UNUSED;
7475 int inserted = 0;
7476 edge e;
7477 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7478 rtx seq;
7479 #endif
7480 #ifdef HAVE_prologue
7481 rtx prologue_end = NULL_RTX;
7482 #endif
7483 #if defined (HAVE_epilogue) || defined(HAVE_return)
7484 rtx epilogue_end = NULL_RTX;
7485 #endif
7487 #ifdef HAVE_prologue
7488 if (HAVE_prologue)
7490 start_sequence ();
7491 seq = gen_prologue ();
7492 emit_insn (seq);
7494 /* Retain a map of the prologue insns. */
7495 record_insns (seq, &prologue);
7496 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7498 seq = get_insns ();
7499 end_sequence ();
7501 /* We can't deal with multiple successors of the entry block
7502 at the moment.  A function should always have at least one
7503 entry point. */
7504 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7505 abort ();
7507 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7508 inserted = 1;
7510 #endif
7512 /* If the exit block has no non-fake predecessors, we don't need
7513 an epilogue. */
7514 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7515 if ((e->flags & EDGE_FAKE) == 0)
7516 break;
7517 if (e == NULL)
7518 goto epilogue_done;
7520 #ifdef HAVE_return
7521 if (optimize && HAVE_return)
7523 /* If we're allowed to generate a simple return instruction,
7524 then by definition we don't need a full epilogue. Examine
7525 the block that falls through to EXIT. If it does not
7526 contain any code, examine its predecessors and try to
7527 emit (conditional) return instructions. */
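/* For instance, an unconditional (set (pc) (label_ref L)) reaching the
   empty final block is replaced outright by a RETURN insn, while for a
   conditional jump only the label_ref arm of the IF_THEN_ELSE is
   replaced with (return), via validate_change below.  */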
7529 basic_block last;
7530 edge e_next;
7531 rtx label;
7533 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7534 if (e->flags & EDGE_FALLTHRU)
7535 break;
7536 if (e == NULL)
7537 goto epilogue_done;
7538 last = e->src;
7540 /* Verify that there are no active instructions in the last block. */
7541 label = last->end;
7542 while (label && GET_CODE (label) != CODE_LABEL)
7544 if (active_insn_p (label))
7545 break;
7546 label = PREV_INSN (label);
7549 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7551 rtx epilogue_line_note = NULL_RTX;
7553 /* Locate the line number associated with the closing brace,
7554 if we can find one. */
7555 for (seq = get_last_insn ();
7556 seq && ! active_insn_p (seq);
7557 seq = PREV_INSN (seq))
7558 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7560 epilogue_line_note = seq;
7561 break;
7564 for (e = last->pred; e; e = e_next)
7566 basic_block bb = e->src;
7567 rtx jump;
7569 e_next = e->pred_next;
7570 if (bb == ENTRY_BLOCK_PTR)
7571 continue;
7573 jump = bb->end;
7574 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7575 continue;
7577 /* If we have an unconditional jump, we can replace that
7578 with a simple return instruction. */
7579 if (simplejump_p (jump))
7581 emit_return_into_block (bb, epilogue_line_note);
7582 delete_insn (jump);
7585 /* If we have a conditional jump, we can try to replace
7586 that with a conditional return instruction. */
7587 else if (condjump_p (jump))
7589 rtx ret, *loc;
7591 ret = SET_SRC (PATTERN (jump));
7592 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7593 loc = &XEXP (ret, 1);
7594 else
7595 loc = &XEXP (ret, 2);
7596 ret = gen_rtx_RETURN (VOIDmode);
7598 if (! validate_change (jump, loc, ret, 0))
7599 continue;
7600 if (JUMP_LABEL (jump))
7601 LABEL_NUSES (JUMP_LABEL (jump))--;
7603 /* If this block has only one successor, it both jumps
7604 and falls through to the fallthru block, so we can't
7605 delete the edge. */
7606 if (bb->succ->succ_next == NULL)
7607 continue;
7609 else
7610 continue;
7612 /* Fix up the CFG for the successful change we just made. */
7613 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7616 /* Emit a return insn for the exit fallthru block. Whether
7617 this is still reachable will be determined later. */
7619 emit_barrier_after (last->end);
7620 emit_return_into_block (last, epilogue_line_note);
7621 epilogue_end = last->end;
7622 last->succ->flags &= ~EDGE_FALLTHRU;
7623 goto epilogue_done;
7626 #endif
7627 #ifdef HAVE_epilogue
7628 if (HAVE_epilogue)
7630 /* Find the edge that falls through to EXIT. Other edges may exist
7631 due to RETURN instructions, but those don't need epilogues.
7632 There really shouldn't be a mixture -- either all should have
7633 been converted or none, however... */
7635 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7636 if (e->flags & EDGE_FALLTHRU)
7637 break;
7638 if (e == NULL)
7639 goto epilogue_done;
7641 start_sequence ();
7642 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7644 seq = gen_epilogue ();
7646 #ifdef INCOMING_RETURN_ADDR_RTX
7647 /* If this function returns with the stack depressed and we can support
7648 it, massage the epilogue to actually do that. */
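/* Roughly: such a function must return with the stack pointer still
   depressed rather than popped back, so keep_stack_depressed above
   massages the epilogue to make no modifications to SP.  */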
7649 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7650 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7651 seq = keep_stack_depressed (seq);
7652 #endif
7654 emit_jump_insn (seq);
7656 /* Retain a map of the epilogue insns. */
7657 record_insns (seq, &epilogue);
7659 seq = get_insns ();
7660 end_sequence ();
7662 insert_insn_on_edge (seq, e);
7663 inserted = 1;
7665 #endif
7666 epilogue_done:
7668 if (inserted)
7669 commit_edge_insertions ();
7671 #ifdef HAVE_sibcall_epilogue
7672 /* Emit sibling epilogues before any sibling call sites. */
7673 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7675 basic_block bb = e->src;
7676 rtx insn = bb->end;
7677 rtx i;
7678 rtx newinsn;
7680 if (GET_CODE (insn) != CALL_INSN
7681 || ! SIBLING_CALL_P (insn))
7682 continue;
7684 start_sequence ();
7685 emit_insn (gen_sibcall_epilogue ());
7686 seq = get_insns ();
7687 end_sequence ();
7689 /* Retain a map of the epilogue insns. Used in life analysis to
7690 avoid getting rid of sibcall epilogue insns. Do this before we
7691 actually emit the sequence. */
7692 record_insns (seq, &sibcall_epilogue);
7694 i = PREV_INSN (insn);
7695 newinsn = emit_insn_before (seq, insn);
7697 #endif
7699 #ifdef HAVE_prologue
7700 if (prologue_end)
7702 rtx insn, prev;
7704 /* GDB handles `break f' by setting a breakpoint on the first
7705 line note after the prologue, which means (1) that if
7706 there are line number notes before where we inserted the
7707 prologue we should move them, and (2) that we should generate
7708 a note before the end of the first basic block if there isn't
7709 one already there.
7711 ??? This behavior is completely broken when dealing with
7712 multiple entry functions.  We simply always place the note
7713 in the first basic block, letting alternate entry points
7714 be missed.
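/* For example, for a breakpoint on `f', GDB stops at the first line
   note after NOTE_INSN_PROLOGUE_END; hence the code below moves any
   earlier line notes past the prologue and, failing to find a line
   note in the first block, copies the first one it encounters.  */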
7717 for (insn = prologue_end; insn; insn = prev)
7719 prev = PREV_INSN (insn);
7720 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7722 /* Note that we cannot reorder the first insn in the
7723 chain, since rest_of_compilation relies on that
7724 remaining constant. */
7725 if (prev == NULL)
7726 break;
7727 reorder_insns (insn, insn, prologue_end);
7731 /* Find the last line number note in the first block. */
7732 for (insn = ENTRY_BLOCK_PTR->next_bb->end;
7733 insn != prologue_end && insn;
7734 insn = PREV_INSN (insn))
7735 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7736 break;
7738 /* If we didn't find one, make a copy of the first line number
7739 we run across. */
7740 if (! insn)
7742 for (insn = next_active_insn (prologue_end);
7743 insn;
7744 insn = PREV_INSN (insn))
7745 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7747 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7748 NOTE_LINE_NUMBER (insn),
7749 prologue_end);
7750 break;
7754 #endif
7755 #ifdef HAVE_epilogue
7756 if (epilogue_end)
7758 rtx insn, next;
7760 /* Similarly, move any line notes that appear after the epilogue.
7761 There is no need, however, to be quite so anal about the existence
7762 of such a note. */
7763 for (insn = epilogue_end; insn; insn = next)
7765 next = NEXT_INSN (insn);
7766 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7767 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7770 #endif
7773 /* Reposition the prologue-end and epilogue-begin notes after instruction
7774 scheduling and delayed branch scheduling. */
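/* Scheduling may move prologue or epilogue insns past their notes;
   since consumers such as GDB key off NOTE_INSN_PROLOGUE_END (see
   above), the notes are moved back to bracket the actual prologue and
   epilogue insns.  */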
7776 void
7777 reposition_prologue_and_epilogue_notes (f)
7778 rtx f ATTRIBUTE_UNUSED;
7780 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7781 rtx insn, last, note;
7782 int len;
7784 if ((len = VARRAY_SIZE (prologue)) > 0)
7786 last = 0, note = 0;
7788 /* Scan from the beginning until we reach the last prologue insn.
7789 We apparently can't depend on basic_block_{head,end} after
7790 reorg has run. */
7791 for (insn = f; insn; insn = NEXT_INSN (insn))
7793 if (GET_CODE (insn) == NOTE)
7795 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7796 note = insn;
7798 else if (contains (insn, prologue))
7800 last = insn;
7801 if (--len == 0)
7802 break;
7806 if (last)
7808 /* Find the prologue-end note if we haven't already, and
7809 move it to just after the last prologue insn. */
7810 if (note == 0)
7812 for (note = last; (note = NEXT_INSN (note));)
7813 if (GET_CODE (note) == NOTE
7814 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7815 break;
7818 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
7819 if (GET_CODE (last) == CODE_LABEL)
7820 last = NEXT_INSN (last);
7821 reorder_insns (note, note, last);
7825 if ((len = VARRAY_SIZE (epilogue)) > 0)
7827 last = 0, note = 0;
7829 /* Scan from the end until we reach the first epilogue insn.
7830 We apparently can't depend on basic_block_{head,end} after
7831 reorg has run. */
7832 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7834 if (GET_CODE (insn) == NOTE)
7836 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7837 note = insn;
7839 else if (contains (insn, epilogue))
7841 last = insn;
7842 if (--len == 0)
7843 break;
7847 if (last)
7849 /* Find the epilogue-begin note if we haven't already, and
7850 move it to just before the first epilogue insn. */
7851 if (note == 0)
7853 for (note = insn; (note = PREV_INSN (note));)
7854 if (GET_CODE (note) == NOTE
7855 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7856 break;
7859 if (PREV_INSN (last) != note)
7860 reorder_insns (note, note, PREV_INSN (last));
7863 #endif /* HAVE_prologue or HAVE_epilogue */
7866 /* Called once, at initialization, to initialize function.c. */
7868 void
7869 init_function_once ()
7871 VARRAY_INT_INIT (prologue, 0, "prologue");
7872 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7873 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7876 #include "gt-function.h"