/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
41 #include "config.h"
42 #include "system.h"
43 #include "coretypes.h"
44 #include "tm.h"
45 #include "rtl.h"
46 #include "tree.h"
47 #include "flags.h"
48 #include "except.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "optabs.h"
52 #include "libfuncs.h"
53 #include "regs.h"
54 #include "hard-reg-set.h"
55 #include "insn-config.h"
56 #include "recog.h"
57 #include "output.h"
58 #include "basic-block.h"
59 #include "toplev.h"
60 #include "hashtab.h"
61 #include "ggc.h"
62 #include "tm_p.h"
63 #include "integrate.h"
64 #include "langhooks.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value down to the largest multiple of the required alignment
   that is not greater than it.  Avoid using division in case the value
   is negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
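
/* For instance (an illustration, not part of the original source): with
   ALIGN == 8, FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16,
   while both leave a value of 16 unchanged.  The bit-masking form also
   behaves as intended for negative VALUEs in two's complement, e.g.
   FLOOR_ROUND (-13, 8) == -16, whereas `(-13 / 8) * 8' could yield -8
   under C89's implementation-defined division.  */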
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* This variable holds a pointer to a function to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) PARAMS ((void));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
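
/* As an illustration (not from the original source; `make_s' and
   `struct S' are hypothetical), in the GNU C statement expression

     int x = ({ struct S s = make_s (); s.field; });

   the aggregate returned by make_s may live in a temporary slot, and
   that slot must survive past the end of the inner statement so that
   `s.field' can still be read; this is what the preservation logic
   described above arranges.  */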
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
                                         int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
                                        enum machine_mode, enum machine_mode,
                                        int, unsigned int, int, htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
                                             enum machine_mode, htab_t));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
                                    htab_t));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
                                          int, int, rtx));
static void fixup_var_refs_insns_with_hash
  PARAMS ((htab_t, rtx, enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
                                         int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
                                      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
                                             int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
                                          struct args_size *));
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
                               tree));
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int, int, htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static hashval_t insns_for_mem_hash PARAMS ((const void *));
static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
static void instantiate_virtual_regs_lossage PARAMS ((rtx));
static tree split_complex_args (tree);

/* Pointer to chain of `struct function' for containing functions.  */
static GTY(()) struct function *outer_function_chain;

/* List of insns that were postponed by purge_addressof_1.  */
static rtx postponed_insns;
/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  (*lang_hooks.function.enter_nested) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  (*lang_hooks.function.leave_nested) (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
                    p->fixup_var_refs_queue->promoted_mode,
                    p->fixup_var_refs_queue->unsignedp,
                    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        fixup_var_refs (queue->modified, queue->promoted_mode,
                        queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  (*lang_hooks.function.final) (f);
  f->stmt = NULL;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->computed_goto_common_label = NULL;
  f->computed_goto_common_reg = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = (*lang_hooks.types.type_for_mode) (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore any alignment request that exceeds what the preferred stack
     boundary can provide.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
        = (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
           + frame_phase);
#else
      function->x_frame_offset
        = (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
           + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
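
/* Usage sketch (illustrative only; the mode chosen here is made up):
   a pass that needs a word of scratch memory in the current frame
   might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   ALIGN == 0 requests the natural alignment of SImode.  Before virtual
   register instantiation the returned MEM's address is based on
   virtual_stack_vars_rtx; afterwards it is based on frame_pointer_rtx,
   as the branch on virtuals_instantiated above shows.  */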
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
                                 && TYPE_READONLY (type));
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
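
/* To summarize the KEEP handling above: KEEP == 2 places the slot at
   target_temp_slot_level, KEEP == 3 at var_temp_slot_level, and any
   other value at the current temp_slot_level with p->keep == KEEP.
   A typical call for a scratch word that dies at the end of the
   current statement would be (illustrative only)

     rtx temp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
 */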
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and that decl should be used in error messages.  In either case, we
   allocate of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error_with_decl (decl, "size of variable `%s' is too large");
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
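
/* Illustrative call (`exp' here is a hypothetical tree expression):
   to obtain a temporary for an aggregate value, expansion code can
   write

     rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   MEMORY_REQUIRED == 1 forces a stack slot even for small scalar
   modes; with it 0, a scalar type simply yields gen_reg_rtx of the
   (possibly promoted) mode, as the tail of the function shows.  */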
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are in use.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
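
/* For example (made-up offsets): two free BLKmode slots with
   base_offset/full_size of 0/16 and 16/8 satisfy
   p->base_offset + p->full_size == q->base_offset, so they merge into
   a single 24-byte slot at offset 0 that later requests can reuse.  */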
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on the remaining operands.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
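
/* Typical pairing in expansion code (a sketch; EXP and TARGET are
   hypothetical):

     push_temp_slots ();
     target = expand_expr (exp, target, VOIDmode, 0);
     preserve_temp_slots (target);
     free_temp_slots ();
     pop_temp_slots ();

   Any temporary that the expression's result might occupy is promoted
   one level by preserve_temp_slots, so popping the level does not
   free it.  */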
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack
   slot.  This is done when an address-reference to the variable is
   seen.  If RESCAN is true, all previously emitted instructions are
   examined and modified to handle the fact that DECL is now
   addressable.  */

void
put_var_into_stack (decl, rescan)
     tree decl;
     int rescan;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
               || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
         ? SAVE_EXPR_RTL (decl)
         : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl, rescan);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
                            decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fixup references to the parts only after we fixup references
         to the whole CONCAT, lest we do double fixups for the latter
         references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
         already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp && rescan)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     htab_t ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     htab_t ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     htab_t ht;
     rtx may_share;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
                                      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0, may_share);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                may_share);
          end_sequence ();
        }
    }
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx may_share;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the Normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                        may_share);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }

      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
                             may_share);

      insn = next;
    }
}
/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
     htab_t ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
{
  struct insns_for_mem_entry tmp;
  struct insns_for_mem_entry *ime;
  rtx insn_list;

  tmp.key = var;
  ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
                           unsignedp, 1, may_share);
}
/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */

static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx no_share;
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
          || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
              && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
                  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
        /* The REG_LIBCALL note will go away since we are going to
           turn INSN into a NOTE, so just delete the
           corresponding REG_RETVAL note.  */
        remove_note (XEXP (note, 0),
                     find_reg_note (XEXP (note, 0), REG_RETVAL,
                                    NULL_RTX));

      delete_insn (insn);
    }

  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
           && (set = single_set (insn)) != 0
           && SET_DEST (set) == var
           /* If this represents the result of an insn group,
              don't delete the insn.  */
           && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
           && (rtx_equal_p (SET_SRC (set), var)
               || (GET_CODE (SET_SRC (set)) == REG
                   && (prev = prev_nonnote_insn (insn)) != 0
                   && (prev_set = single_set (prev)) != 0
                   && SET_DEST (prev_set) == SET_SRC (set)
                   && rtx_equal_p (SET_SRC (prev_set), var))))
    {
      delete_insn (insn);
    }
  else
    {
      struct fixup_replacement *replacements = 0;
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
        {
          /* If the insn that copies the results of a CALL_INSN
             into a pseudo now references VAR, we have to use an
             intermediate pseudo since we want the life of the
             return value register to be only a single insn.

             If we don't use an intermediate pseudo, such things as
             address computations to make the address of VAR valid
             if it is not can be placed between the CALL_INSN and INSN.

             To make sure this doesn't happen, we record the destination
             of the CALL_INSN and see if the next insn uses both that
             and VAR.  */

          if (call_dest != 0 && GET_CODE (insn) == INSN
              && reg_mentioned_p (var, PATTERN (insn))
              && reg_mentioned_p (call_dest, PATTERN (insn)))
            {
              rtx temp = gen_reg_rtx (GET_MODE (call_dest));

              emit_insn_before (gen_move_insn (temp, call_dest), insn);

              PATTERN (insn) = replace_rtx (PATTERN (insn),
                                            call_dest, temp);
            }

          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == SET)
            call_dest = SET_DEST (PATTERN (insn));
          else if (GET_CODE (insn) == CALL_INSN
                   && GET_CODE (PATTERN (insn)) == PARALLEL
                   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
            call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
          else
            call_dest = 0;
        }

      /* See if we have to do anything to INSN now that VAR is in
         memory.  If it needs to be loaded into a pseudo, use a single
         pseudo for the entire insn in case there is a MATCH_DUP
         between two operands.  We pass a pointer to the head of
         a list of struct fixup_replacements.  If fixup_var_refs_1
         needs to allocate pseudos or replacement MEMs (for SUBREGs),
         it will record them in this list.

         If it allocated a pseudo for any replacement, we copy into
         it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                        &replacements, no_share);

      /* If this is last_parm_insn, and any instructions were output
         after it to fix it up, then we must set last_parm_insn to
         the last such instruction emitted.  */
      if (insn == last_parm_insn)
        last_parm_insn = PREV_INSN (next_insn);

      while (replacements)
        {
          struct fixup_replacement *next;

          if (GET_CODE (replacements->new) == REG)
            {
              rtx insert_before;
              rtx seq;

              /* OLD might be a (subreg (mem)).  */
              if (GET_CODE (replacements->old) == SUBREG)
                replacements->old
                  = fixup_memory_subreg (replacements->old, insn,
                                         promoted_mode, 0);
              else
                replacements->old
                  = fixup_stack_1 (replacements->old, insn);

              insert_before = insn;

              /* If we are changing the mode, do a conversion.
                 This might be wasteful, but combine.c will
                 eliminate much of the waste.  */

              if (GET_MODE (replacements->new)
                  != GET_MODE (replacements->old))
                {
                  start_sequence ();
                  convert_move (replacements->new,
                                replacements->old, unsignedp);
                  seq = get_insns ();
                  end_sequence ();
                }
              else
                seq = gen_move_insn (replacements->new,
                                     replacements->old);

              emit_insn_before (seq, insert_before);
            }

          next = replacements->next;
          free (replacements);
          replacements = next;
        }

      /* Also fix up any invalid exprs in the REG_NOTES of this insn.
         But don't touch other insns referred to by reg-notes;
         we will get them elsewhere.  */
      while (note)
        {
          if (GET_CODE (note) != INSN_LIST)
            XEXP (note, 0)
              = walk_fixup_memory_subreg (XEXP (note, 0), insn,
                                          promoted_mode, 1);
          note = XEXP (note, 1);
        }
    }
}
1885 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1886 See if the rtx expression at *LOC in INSN needs to be changed.
1888 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1889 contain a list of original rtx's and replacements. If we find that we need
1890 to modify this insn by replacing a memory reference with a pseudo or by
1891 making a new MEM to implement a SUBREG, we consult that list to see if
1892 we have already chosen a replacement. If none has already been allocated,
1893 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1894 or the SUBREG, as appropriate, to the pseudo. */
1896 static void
1897 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1898 rtx var;
1899 enum machine_mode promoted_mode;
1900 rtx *loc;
1901 rtx insn;
1902 struct fixup_replacement **replacements;
1903 rtx no_share;
1905 int i;
1906 rtx x = *loc;
1907 RTX_CODE code = GET_CODE (x);
1908 const char *fmt;
1909 rtx tem, tem1;
1910 struct fixup_replacement *replacement;
1912 switch (code)
1914 case ADDRESSOF:
1915 if (XEXP (x, 0) == var)
1917 /* Prevent sharing of rtl that might lose. */
1918 rtx sub = copy_rtx (XEXP (var, 0));
1920 if (! validate_change (insn, loc, sub, 0))
1922 rtx y = gen_reg_rtx (GET_MODE (sub));
1923 rtx seq, new_insn;
1925 /* We should be able to replace with a register or all is lost.
1926 Note that we can't use validate_change to verify this, since
1927 we don't care about replacing all duplicates simultaneously.  */
1928 if (! validate_replace_rtx (*loc, y, insn))
1929 abort ();
1931 /* Careful! First try to recognize a direct move of the
1932 value, mimicking how things are done in gen_reload wrt
1933 PLUS. Consider what happens when insn is a conditional
1934 move instruction and addsi3 clobbers flags. */
1936 start_sequence ();
1937 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1938 seq = get_insns ();
1939 end_sequence ();
1941 if (recog_memoized (new_insn) < 0)
1943 /* That failed. Fall back on force_operand and hope. */
1945 start_sequence ();
1946 sub = force_operand (sub, y);
1947 if (sub != y)
1948 emit_insn (gen_move_insn (y, sub));
1949 seq = get_insns ();
1950 end_sequence ();
1953 #ifdef HAVE_cc0
1954 /* Don't separate setter from user. */
1955 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1956 insn = PREV_INSN (insn);
1957 #endif
1959 emit_insn_before (seq, insn);
1962 return;
1964 case MEM:
1965 if (var == x)
1967 /* If we already have a replacement, use it. Otherwise,
1968 try to fix up this address in case it is invalid. */
1970 replacement = find_fixup_replacement (replacements, var);
1971 if (replacement->new)
1973 *loc = replacement->new;
1974 return;
1977 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1979 /* Unless we are forcing memory to register or we changed the mode,
1980 we can leave things the way they are if the insn is valid. */
1982 INSN_CODE (insn) = -1;
1983 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1984 && recog_memoized (insn) >= 0)
1985 return;
1987 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1988 return;
1991 /* If X contains VAR, we need to unshare it here so that we update
1992 each occurrence separately. But all identical MEMs in one insn
1993 must be replaced with the same rtx because of the possibility of
1994 MATCH_DUPs. */
1996 if (reg_mentioned_p (var, x))
1998 replacement = find_fixup_replacement (replacements, x);
1999 if (replacement->new == 0)
2000 replacement->new = copy_most_rtx (x, no_share);
2002 *loc = x = replacement->new;
2003 code = GET_CODE (x);
2005 break;
2007 case REG:
2008 case CC0:
2009 case PC:
2010 case CONST_INT:
2011 case CONST:
2012 case SYMBOL_REF:
2013 case LABEL_REF:
2014 case CONST_DOUBLE:
2015 case CONST_VECTOR:
2016 return;
2018 case SIGN_EXTRACT:
2019 case ZERO_EXTRACT:
2020 /* Note that in some cases those types of expressions are altered
2021 by optimize_bit_field, and do not survive to get here. */
2022 if (XEXP (x, 0) == var
2023 || (GET_CODE (XEXP (x, 0)) == SUBREG
2024 && SUBREG_REG (XEXP (x, 0)) == var))
2026 /* Get TEM as a valid MEM in the mode presently in the insn.
2028 We don't worry about the possibility of MATCH_DUP here; it
2029 is highly unlikely and would be tricky to handle. */
2031 tem = XEXP (x, 0);
2032 if (GET_CODE (tem) == SUBREG)
2034 if (GET_MODE_BITSIZE (GET_MODE (tem))
2035 > GET_MODE_BITSIZE (GET_MODE (var)))
2037 replacement = find_fixup_replacement (replacements, var);
2038 if (replacement->new == 0)
2039 replacement->new = gen_reg_rtx (GET_MODE (var));
2040 SUBREG_REG (tem) = replacement->new;
2042 /* The following code works only if we have a MEM, so we
2043 need to handle the subreg here. We directly substitute
2044 it assuming that a subreg must be OK here. We already
2045 scheduled a replacement to copy the mem into the
2046 subreg. */
2047 XEXP (x, 0) = tem;
2048 return;
2050 else
2051 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2053 else
2054 tem = fixup_stack_1 (tem, insn);
2056 /* Unless we want to load from memory, get TEM into the proper mode
2057 for an extract from memory. This can only be done if the
2058 extract is at a constant position and length. */
2060 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2061 && GET_CODE (XEXP (x, 2)) == CONST_INT
2062 && ! mode_dependent_address_p (XEXP (tem, 0))
2063 && ! MEM_VOLATILE_P (tem))
2065 enum machine_mode wanted_mode = VOIDmode;
2066 enum machine_mode is_mode = GET_MODE (tem);
2067 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2069 if (GET_CODE (x) == ZERO_EXTRACT)
2071 enum machine_mode new_mode
2072 = mode_for_extraction (EP_extzv, 1);
2073 if (new_mode != MAX_MACHINE_MODE)
2074 wanted_mode = new_mode;
2076 else if (GET_CODE (x) == SIGN_EXTRACT)
2078 enum machine_mode new_mode
2079 = mode_for_extraction (EP_extv, 1);
2080 if (new_mode != MAX_MACHINE_MODE)
2081 wanted_mode = new_mode;
2084 /* If we have a narrower mode, we can do something. */
2085 if (wanted_mode != VOIDmode
2086 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2088 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2089 rtx old_pos = XEXP (x, 2);
2090 rtx newmem;
2092 /* If the bytes and bits are counted differently, we
2093 must adjust the offset. */
2094 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2095 offset = (GET_MODE_SIZE (is_mode)
2096 - GET_MODE_SIZE (wanted_mode) - offset);
2098 pos %= GET_MODE_BITSIZE (wanted_mode);
2100 newmem = adjust_address_nv (tem, wanted_mode, offset);
2102 /* Make the change and see if the insn remains valid. */
2103 INSN_CODE (insn) = -1;
2104 XEXP (x, 0) = newmem;
2105 XEXP (x, 2) = GEN_INT (pos);
2107 if (recog_memoized (insn) >= 0)
2108 return;
2110 /* Otherwise, restore old position. XEXP (x, 0) will be
2111 restored later. */
2112 XEXP (x, 2) = old_pos;
2116 /* If we get here, the bitfield extract insn can't accept a memory
2117 reference. Copy the input into a register. */
2119 tem1 = gen_reg_rtx (GET_MODE (tem));
2120 emit_insn_before (gen_move_insn (tem1, tem), insn);
2121 XEXP (x, 0) = tem1;
2122 return;
2124 break;
2126 case SUBREG:
2127 if (SUBREG_REG (x) == var)
2129 /* If this is a special SUBREG made because VAR was promoted
2130 from a wider mode, replace it with VAR and call ourself
2131 recursively, this time saying that the object previously
2132 had its current mode (by virtue of the SUBREG). */
2134 if (SUBREG_PROMOTED_VAR_P (x))
2136 *loc = var;
2137 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2138 no_share);
2139 return;
2142 /* If this SUBREG makes VAR wider, it has become a paradoxical
2143 SUBREG with VAR in memory, but these aren't allowed at this
2144 stage of the compilation. So load VAR into a pseudo and take
2145 a SUBREG of that pseudo. */
2146 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2148 replacement = find_fixup_replacement (replacements, var);
2149 if (replacement->new == 0)
2150 replacement->new = gen_reg_rtx (promoted_mode);
2151 SUBREG_REG (x) = replacement->new;
2152 return;
2155 /* See if we have already found a replacement for this SUBREG.
2156 If so, use it. Otherwise, make a MEM and see if the insn
2157 is recognized. If not, or if we should force MEM into a register,
2158 make a pseudo for this SUBREG. */
2159 replacement = find_fixup_replacement (replacements, x);
2160 if (replacement->new)
2162 *loc = replacement->new;
2163 return;
2166 replacement->new = *loc = fixup_memory_subreg (x, insn,
2167 promoted_mode, 0);
2169 INSN_CODE (insn) = -1;
2170 if (! flag_force_mem && recog_memoized (insn) >= 0)
2171 return;
2173 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2174 return;
2176 break;
2178 case SET:
2179 /* First do special simplification of bit-field references. */
2180 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2181 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2182 optimize_bit_field (x, insn, 0);
2183 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2184 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2185 optimize_bit_field (x, insn, 0);
2187 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2188 into a register and then store it back out. */
2189 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2190 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2191 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2192 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2193 > GET_MODE_SIZE (GET_MODE (var))))
2195 replacement = find_fixup_replacement (replacements, var);
2196 if (replacement->new == 0)
2197 replacement->new = gen_reg_rtx (GET_MODE (var));
2199 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2200 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2203 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2204 insn into a pseudo and store the low part of the pseudo into VAR. */
2205 if (GET_CODE (SET_DEST (x)) == SUBREG
2206 && SUBREG_REG (SET_DEST (x)) == var
2207 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2208 > GET_MODE_SIZE (GET_MODE (var))))
2210 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2211 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2212 tem)),
2213 insn);
2214 break;
2218 rtx dest = SET_DEST (x);
2219 rtx src = SET_SRC (x);
2220 rtx outerdest = dest;
2222 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2223 || GET_CODE (dest) == SIGN_EXTRACT
2224 || GET_CODE (dest) == ZERO_EXTRACT)
2225 dest = XEXP (dest, 0);
2227 if (GET_CODE (src) == SUBREG)
2228 src = SUBREG_REG (src);
2230 /* If VAR does not appear at the top level of the SET
2231 just scan the lower levels of the tree. */
2233 if (src != var && dest != var)
2234 break;
2236 /* We will need to rerecognize this insn. */
2237 INSN_CODE (insn) = -1;
2239 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2240 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2242 /* Since this case will return, ensure we fixup all the
2243 operands here. */
2244 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2245 insn, replacements, no_share);
2246 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2247 insn, replacements, no_share);
2248 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2249 insn, replacements, no_share);
2251 tem = XEXP (outerdest, 0);
2253 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2254 that may appear inside a ZERO_EXTRACT.
2255 This was legitimate when the MEM was a REG. */
2256 if (GET_CODE (tem) == SUBREG
2257 && SUBREG_REG (tem) == var)
2258 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2259 else
2260 tem = fixup_stack_1 (tem, insn);
2262 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2263 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2264 && ! mode_dependent_address_p (XEXP (tem, 0))
2265 && ! MEM_VOLATILE_P (tem))
2267 enum machine_mode wanted_mode;
2268 enum machine_mode is_mode = GET_MODE (tem);
2269 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2271 wanted_mode = mode_for_extraction (EP_insv, 0);
2273 /* If we have a narrower mode, we can do something. */
2274 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2276 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2277 rtx old_pos = XEXP (outerdest, 2);
2278 rtx newmem;
2280 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2281 offset = (GET_MODE_SIZE (is_mode)
2282 - GET_MODE_SIZE (wanted_mode) - offset);
2284 pos %= GET_MODE_BITSIZE (wanted_mode);
2286 newmem = adjust_address_nv (tem, wanted_mode, offset);
2288 /* Make the change and see if the insn remains valid. */
2289 INSN_CODE (insn) = -1;
2290 XEXP (outerdest, 0) = newmem;
2291 XEXP (outerdest, 2) = GEN_INT (pos);
2293 if (recog_memoized (insn) >= 0)
2294 return;
2296 /* Otherwise, restore old position. XEXP (x, 0) will be
2297 restored later. */
2298 XEXP (outerdest, 2) = old_pos;
2302 /* If we get here, the bit-field store doesn't allow memory
2303 or isn't located at a constant position. Load the value into
2304 a register, do the store, and put it back into memory. */
2306 tem1 = gen_reg_rtx (GET_MODE (tem));
2307 emit_insn_before (gen_move_insn (tem1, tem), insn);
2308 emit_insn_after (gen_move_insn (tem, tem1), insn);
2309 XEXP (outerdest, 0) = tem1;
2310 return;
2313 /* STRICT_LOW_PART is a no-op on memory references
2314 and it can cause combinations to be unrecognizable,
2315 so eliminate it. */
2317 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2318 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2320 /* A valid insn to copy VAR into or out of a register
2321 must be left alone, to avoid an infinite loop here.
2322 If the reference to VAR is by a subreg, fix that up,
2323 since SUBREG is not valid for a memref.
2324 Also fix up the address of the stack slot.
2326 Note that we must not try to recognize the insn until
2327 after we know that we have valid addresses and no
2328 (subreg (mem ...) ...) constructs, since these interfere
2329 with determining the validity of the insn. */
2331 if ((SET_SRC (x) == var
2332 || (GET_CODE (SET_SRC (x)) == SUBREG
2333 && SUBREG_REG (SET_SRC (x)) == var))
2334 && (GET_CODE (SET_DEST (x)) == REG
2335 || (GET_CODE (SET_DEST (x)) == SUBREG
2336 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2337 && GET_MODE (var) == promoted_mode
2338 && x == single_set (insn))
2340 rtx pat, last;
2342 if (GET_CODE (SET_SRC (x)) == SUBREG
2343 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2344 > GET_MODE_SIZE (GET_MODE (var))))
2346 /* This (subreg VAR) is now a paradoxical subreg. We need
2347 to replace VAR instead of the subreg. */
2348 replacement = find_fixup_replacement (replacements, var);
2349 if (replacement->new == NULL_RTX)
2350 replacement->new = gen_reg_rtx (GET_MODE (var));
2351 SUBREG_REG (SET_SRC (x)) = replacement->new;
2353 else
2355 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2356 if (replacement->new)
2357 SET_SRC (x) = replacement->new;
2358 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2359 SET_SRC (x) = replacement->new
2360 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2361 0);
2362 else
2363 SET_SRC (x) = replacement->new
2364 = fixup_stack_1 (SET_SRC (x), insn);
2367 if (recog_memoized (insn) >= 0)
2368 return;
2370 /* INSN is not valid, but we know that we want to
2371 copy SET_SRC (x) to SET_DEST (x) in some way. So
2372 we generate the move and see whether it requires more
2373 than one insn. If it does, we emit those insns and
2374 delete INSN. Otherwise, we can just replace the pattern
2375 of INSN; we have already verified above that INSN has
2376 no other function than to do X. */
2378 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2379 if (NEXT_INSN (pat) != NULL_RTX)
2381 last = emit_insn_before (pat, insn);
2383 /* INSN might have REG_RETVAL or other important notes, so
2384 we need to store the pattern of the last insn in the
2385 sequence into INSN similarly to the normal case. LAST
2386 should not have REG_NOTES, but we allow them if INSN has
2387 no REG_NOTES. */
2388 if (REG_NOTES (last) && REG_NOTES (insn))
2389 abort ();
2390 if (REG_NOTES (last))
2391 REG_NOTES (insn) = REG_NOTES (last);
2392 PATTERN (insn) = PATTERN (last);
2394 delete_insn (last);
2396 else
2397 PATTERN (insn) = PATTERN (pat);
2399 return;
2402 if ((SET_DEST (x) == var
2403 || (GET_CODE (SET_DEST (x)) == SUBREG
2404 && SUBREG_REG (SET_DEST (x)) == var))
2405 && (GET_CODE (SET_SRC (x)) == REG
2406 || (GET_CODE (SET_SRC (x)) == SUBREG
2407 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2408 && GET_MODE (var) == promoted_mode
2409 && x == single_set (insn))
2411 rtx pat, last;
2413 if (GET_CODE (SET_DEST (x)) == SUBREG)
2414 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2415 promoted_mode, 0);
2416 else
2417 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2419 if (recog_memoized (insn) >= 0)
2420 return;
2422 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2423 if (NEXT_INSN (pat) != NULL_RTX)
2425 last = emit_insn_before (pat, insn);
2427 /* INSN might have REG_RETVAL or other important notes, so
2428 we need to store the pattern of the last insn in the
2429 sequence into INSN similarly to the normal case. LAST
2430 should not have REG_NOTES, but we allow them if INSN has
2431 no REG_NOTES. */
2432 if (REG_NOTES (last) && REG_NOTES (insn))
2433 abort ();
2434 if (REG_NOTES (last))
2435 REG_NOTES (insn) = REG_NOTES (last);
2436 PATTERN (insn) = PATTERN (last);
2438 delete_insn (last);
2440 else
2441 PATTERN (insn) = PATTERN (pat);
2443 return;
2446 /* Otherwise, storing into VAR must be handled specially
2447 by storing into a temporary and copying that into VAR
2448 with a new insn after this one. Note that this case
2449 will be used when storing into a promoted scalar since
2450 the insn will now have different modes on the input
2451 and output and hence will be invalid (except for the case
2452 of setting it to a constant, which does not need any
2453 change if it is valid). We generate extra code in that case,
2454 but combine.c will eliminate it. */
2456 if (dest == var)
2458 rtx temp;
2459 rtx fixeddest = SET_DEST (x);
2460 enum machine_mode temp_mode;
2462 /* STRICT_LOW_PART can be discarded, around a MEM. */
2463 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2464 fixeddest = XEXP (fixeddest, 0);
2465 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2466 if (GET_CODE (fixeddest) == SUBREG)
2468 fixeddest = fixup_memory_subreg (fixeddest, insn,
2469 promoted_mode, 0);
2470 temp_mode = GET_MODE (fixeddest);
2472 else
2474 fixeddest = fixup_stack_1 (fixeddest, insn);
2475 temp_mode = promoted_mode;
2478 temp = gen_reg_rtx (temp_mode);
2480 emit_insn_after (gen_move_insn (fixeddest,
2481 gen_lowpart (GET_MODE (fixeddest),
2482 temp)),
2483 insn);
2485 SET_DEST (x) = temp;
2489 default:
2490 break;
2493 /* Nothing special about this RTX; fix its operands. */
2495 fmt = GET_RTX_FORMAT (code);
2496 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2498 if (fmt[i] == 'e')
2499 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2500 no_share);
2501 else if (fmt[i] == 'E')
2503 int j;
2504 for (j = 0; j < XVECLEN (x, i); j++)
2505 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2506 insn, replacements, no_share);
2511 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2512 The REG was placed on the stack, so X now has the form (SUBREG:m1
2513 (MEM:m2 ...)).
2515 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2516 must be emitted to compute NEWADDR, put them before INSN.
2518 UNCRITICAL nonzero means accept paradoxical subregs.
2519 This is used for subregs found inside REG_NOTES. */
2521 static rtx
2522 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2523 rtx x;
2524 rtx insn;
2525 enum machine_mode promoted_mode;
2526 int uncritical;
2528 int offset;
2529 rtx mem = SUBREG_REG (x);
2530 rtx addr = XEXP (mem, 0);
2531 enum machine_mode mode = GET_MODE (x);
2532 rtx result, seq;
2534 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2535 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2536 abort ();
2538 offset = SUBREG_BYTE (x);
2539 if (BYTES_BIG_ENDIAN)
2540 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2541 the offset so that it points to the right location within the
2542 MEM. */
2543 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2545 if (!flag_force_addr
2546 && memory_address_p (mode, plus_constant (addr, offset)))
2547 /* Shortcut if no insns need be emitted. */
2548 return adjust_address (mem, mode, offset);
2550 start_sequence ();
2551 result = adjust_address (mem, mode, offset);
2552 seq = get_insns ();
2553 end_sequence ();
2555 emit_insn_before (seq, insn);
2556 return result;
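/* A hypothetical example of the transformation (not from the source):
   after a promoted SImode pseudo is pushed onto the stack,

     (subreg:HI (mem:SI (plus:SI (reg:SI fp) (const_int -8))) 0)

   is rewritten into the equivalent

     (mem:HI (plus:SI (reg:SI fp) (const_int -8)))

   with any insns needed to form the new address emitted before INSN;
   when the adjusted address is already valid, the shortcut above
   returns the new MEM without emitting anything.  */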
2559 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2560 Replace subexpressions of X in place.
2561 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2562 Otherwise return X, with its contents possibly altered.
2564 INSN, PROMOTED_MODE and UNCRITICAL are as for
2565 fixup_memory_subreg. */
2567 static rtx
2568 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2569 rtx x;
2570 rtx insn;
2571 enum machine_mode promoted_mode;
2572 int uncritical;
2574 enum rtx_code code;
2575 const char *fmt;
2576 int i;
2578 if (x == 0)
2579 return 0;
2581 code = GET_CODE (x);
2583 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2584 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2586 /* Nothing special about this RTX; fix its operands. */
2588 fmt = GET_RTX_FORMAT (code);
2589 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2591 if (fmt[i] == 'e')
2592 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2593 promoted_mode, uncritical);
2594 else if (fmt[i] == 'E')
2596 int j;
2597 for (j = 0; j < XVECLEN (x, i); j++)
2598 XVECEXP (x, i, j)
2599 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2600 promoted_mode, uncritical);
2603 return x;
2606 /* For each memory ref within X, if it refers to a stack slot
2607 with an out of range displacement, put the address in a temp register
2608 (emitting new insns before INSN to load these registers)
2609 and alter the memory ref to use that register.
2610 Replace each such MEM rtx with a copy, to avoid clobberage. */
2612 static rtx
2613 fixup_stack_1 (x, insn)
2614 rtx x;
2615 rtx insn;
2617 int i;
2618 RTX_CODE code = GET_CODE (x);
2619 const char *fmt;
2621 if (code == MEM)
2623 rtx ad = XEXP (x, 0);
2624 /* If we have address of a stack slot but it's not valid
2625 (displacement is too large), compute the sum in a register. */
2626 if (GET_CODE (ad) == PLUS
2627 && GET_CODE (XEXP (ad, 0)) == REG
2628 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2629 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2630 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2631 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2632 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2633 #endif
2634 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2635 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2636 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2637 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2639 rtx temp, seq;
2640 if (memory_address_p (GET_MODE (x), ad))
2641 return x;
2643 start_sequence ();
2644 temp = copy_to_reg (ad);
2645 seq = get_insns ();
2646 end_sequence ();
2647 emit_insn_before (seq, insn);
2648 return replace_equiv_address (x, temp);
2650 return x;
2653 fmt = GET_RTX_FORMAT (code);
2654 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2656 if (fmt[i] == 'e')
2657 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2658 else if (fmt[i] == 'E')
2660 int j;
2661 for (j = 0; j < XVECLEN (x, i); j++)
2662 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2665 return x;
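/* For illustration (hypothetical displacement): if INSN contains

     (mem:SI (plus:SI (reg:SI frame-pointer) (const_int 40000)))

   and a 40000-byte displacement is out of range for the target's
   addressing modes, the PLUS is computed into a fresh pseudo with
   copy_to_reg before INSN and the reference becomes an equivalent
   (mem:SI (reg:SI NNN)) via replace_equiv_address.  */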
2668 /* Optimization: a bit-field instruction whose field
2669 happens to be a byte or halfword in memory
2670 can be changed to a move instruction.
2672 We call here when INSN is an insn to examine or store into a bit-field.
2673 BODY is the SET-rtx to be altered.
2675 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2676 (Currently this is called only from function.c, and EQUIV_MEM
2677 is always 0.) */
2679 static void
2680 optimize_bit_field (body, insn, equiv_mem)
2681 rtx body;
2682 rtx insn;
2683 rtx *equiv_mem;
2685 rtx bitfield;
2686 int destflag;
2687 rtx seq = 0;
2688 enum machine_mode mode;
2690 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2691 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2692 bitfield = SET_DEST (body), destflag = 1;
2693 else
2694 bitfield = SET_SRC (body), destflag = 0;
2696 /* First check that the field being stored has constant size and position
2697 and is in fact a byte or halfword suitably aligned. */
2699 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2700 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2701 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2702 != BLKmode)
2703 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2705 rtx memref = 0;
2707 /* Now check that the containing word is memory, not a register,
2708 and that it is safe to change the machine mode. */
2710 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2711 memref = XEXP (bitfield, 0);
2712 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2713 && equiv_mem != 0)
2714 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2715 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2716 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2717 memref = SUBREG_REG (XEXP (bitfield, 0));
2718 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2719 && equiv_mem != 0
2720 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2721 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2723 if (memref
2724 && ! mode_dependent_address_p (XEXP (memref, 0))
2725 && ! MEM_VOLATILE_P (memref))
2727 /* Now adjust the address, first for any subreg'ing
2728 that we are now getting rid of,
2729 and then for which byte of the word is wanted. */
2731 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2732 rtx insns;
2734 /* Adjust OFFSET to count bits from low-address byte. */
2735 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2736 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2737 - offset - INTVAL (XEXP (bitfield, 1)));
2739 /* Adjust OFFSET to count bytes from low-address byte. */
2740 offset /= BITS_PER_UNIT;
2741 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2743 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2744 / UNITS_PER_WORD) * UNITS_PER_WORD;
2745 if (BYTES_BIG_ENDIAN)
2746 offset -= (MIN (UNITS_PER_WORD,
2747 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2748 - MIN (UNITS_PER_WORD,
2749 GET_MODE_SIZE (GET_MODE (memref))));
2752 start_sequence ();
2753 memref = adjust_address (memref, mode, offset);
2754 insns = get_insns ();
2755 end_sequence ();
2756 emit_insn_before (insns, insn);
2758 /* Store this memory reference where
2759 we found the bit field reference. */
2761 if (destflag)
2763 validate_change (insn, &SET_DEST (body), memref, 1);
2764 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2766 rtx src = SET_SRC (body);
2767 while (GET_CODE (src) == SUBREG
2768 && SUBREG_BYTE (src) == 0)
2769 src = SUBREG_REG (src);
2770 if (GET_MODE (src) != GET_MODE (memref))
2771 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2772 validate_change (insn, &SET_SRC (body), src, 1);
2774 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2775 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2776 /* This shouldn't happen because anything that didn't have
2777 one of these modes should have been converted explicitly
2778 and then referenced through a subreg.
2779 This is so because the original bit-field was
2780 handled by agg_mode and so its tree structure had
2781 the same mode that memref now has. */
2782 abort ();
2784 else
2786 rtx dest = SET_DEST (body);
2788 while (GET_CODE (dest) == SUBREG
2789 && SUBREG_BYTE (dest) == 0
2790 && (GET_MODE_CLASS (GET_MODE (dest))
2791 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2792 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2793 <= UNITS_PER_WORD))
2794 dest = SUBREG_REG (dest);
2796 validate_change (insn, &SET_DEST (body), dest, 1);
2798 if (GET_MODE (dest) == GET_MODE (memref))
2799 validate_change (insn, &SET_SRC (body), memref, 1);
2800 else
2802 /* Convert the mem ref to the destination mode. */
2803 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2805 start_sequence ();
2806 convert_move (newreg, memref,
2807 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2808 seq = get_insns ();
2809 end_sequence ();
2811 validate_change (insn, &SET_SRC (body), newreg, 1);
2815 /* See if we can convert this extraction or insertion into
2816 a simple move insn. We might not be able to do so if this
2817 was, for example, part of a PARALLEL.
2819 If we succeed, write out any needed conversions. If we fail,
2820 it is hard to guess why we failed, so don't do anything
2821 special; just let the optimization be suppressed. */
2823 if (apply_change_group () && seq)
2824 emit_insn_before (seq, insn);
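/* Illustrative sketch (hypothetical insn, little-endian target): an
   extraction such as

     (set (reg:SI 90)
          (zero_extract:SI (mem:SI (reg:SI 80))
                           (const_int 8) (const_int 8)))

   passes the constant size/position and alignment tests above, so it
   is rewritten as a move from a QImode MEM at byte offset 1, with a
   zero-extending convert_move supplying the SImode result.  */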
2829 /* These routines are responsible for converting virtual register references
2830 to the actual hard register references once RTL generation is complete.
2832 The following five variables are used for communication between the
2833 routines. They contain the offsets of the virtual registers from their
2834 respective hard registers. */
2836 static int in_arg_offset;
2837 static int var_offset;
2838 static int dynamic_offset;
2839 static int out_arg_offset;
2840 static int cfa_offset;
2842 /* On most machines, the stack pointer register is equivalent to the bottom
2843 of the stack. */
2845 #ifndef STACK_POINTER_OFFSET
2846 #define STACK_POINTER_OFFSET 0
2847 #endif
2849 /* If not defined, pick an appropriate default for the offset of dynamically
2850 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2851 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2853 #ifndef STACK_DYNAMIC_OFFSET
2855 /* The bottom of the stack points to the actual arguments. If
2856 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2857 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2858 stack space for register parameters is not pushed by the caller, but
2859 rather part of the fixed stack areas and hence not included in
2860 `current_function_outgoing_args_size'. Nevertheless, we must allow
2861 for it when allocating stack dynamic objects. */
2863 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2864 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2865 ((ACCUMULATE_OUTGOING_ARGS \
2866 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2867 + (STACK_POINTER_OFFSET))
2869 #else
2870 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2871 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2872 + (STACK_POINTER_OFFSET))
2873 #endif
2874 #endif
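/* A worked example with hypothetical numbers: under
   ACCUMULATE_OUTGOING_ARGS, with current_function_outgoing_args_size
   == 32, REG_PARM_STACK_SPACE (FNDECL) == 16 and STACK_POINTER_OFFSET
   == 0, the first definition above gives

     STACK_DYNAMIC_OFFSET (FNDECL) == 32 + 16 + 0 == 48

   so dynamically allocated objects start 48 bytes above the stack
   pointer.  */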
2876 /* On most machines, the CFA coincides with the first incoming parm. */
2878 #ifndef ARG_POINTER_CFA_OFFSET
2879 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2880 #endif
2882 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
2883 had its address taken. DECL is the decl or SAVE_EXPR for the
2884 object stored in the register, for later use if we do need to force
2885 REG into the stack. REG is overwritten by the MEM as in
2886 put_reg_into_stack. RESCAN is true if previously emitted
2887 instructions must be rescanned and modified now that the REG has
2888 been transformed. */
2890 rtx
2891 gen_mem_addressof (reg, decl, rescan)
2892 rtx reg;
2893 tree decl;
2894 int rescan;
2896 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2897 REGNO (reg), decl);
2899 /* Calculate this before we start messing with decl's RTL. */
2900 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2902 /* If the original REG was a user-variable, then so is the REG whose
2903 address is being taken. Likewise for unchanging. */
2904 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2905 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2907 PUT_CODE (reg, MEM);
2908 MEM_ATTRS (reg) = 0;
2909 XEXP (reg, 0) = r;
2911 if (decl)
2913 tree type = TREE_TYPE (decl);
2914 enum machine_mode decl_mode
2915 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2916 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2917 : DECL_RTL_IF_SET (decl));
2919 PUT_MODE (reg, decl_mode);
2921 /* Clear DECL_RTL momentarily so functions below will work
2922 properly, then set it again. */
2923 if (DECL_P (decl) && decl_rtl == reg)
2924 SET_DECL_RTL (decl, 0);
2926 set_mem_attributes (reg, decl, 1);
2927 set_mem_alias_set (reg, set);
2929 if (DECL_P (decl) && decl_rtl == reg)
2930 SET_DECL_RTL (decl, reg);
2932 if (rescan
2933 && (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0)))
2934 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2936 else if (rescan)
2937 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2939 return reg;
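/* For illustration (pseudo register numbers are hypothetical): taking
   the address of pseudo 117 that holds DECL rewrites the REG rtx in
   place, turning

     (reg:SI 117)   into   (mem:SI (addressof:SI (reg:SI 118) 117 DECL))

   where 118 is the fresh pseudo standing for the eventual address and
   117 is remembered as ADDRESSOF_REGNO in case the register must
   later be forced into the stack.  */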
2942 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2944 void
2945 flush_addressof (decl)
2946 tree decl;
2948 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2949 && DECL_RTL (decl) != 0
2950 && GET_CODE (DECL_RTL (decl)) == MEM
2951 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2952 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2953 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2956 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2958 static void
2959 put_addressof_into_stack (r, ht)
2960 rtx r;
2961 htab_t ht;
2963 tree decl, type;
2964 int volatile_p, used_p;
2966 rtx reg = XEXP (r, 0);
2968 if (GET_CODE (reg) != REG)
2969 abort ();
2971 decl = ADDRESSOF_DECL (r);
2972 if (decl)
2974 type = TREE_TYPE (decl);
2975 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2976 && TREE_THIS_VOLATILE (decl));
2977 used_p = (TREE_USED (decl)
2978 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2980 else
2982 type = NULL_TREE;
2983 volatile_p = 0;
2984 used_p = 1;
2987 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2988 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2991 /* List of replacements made below in purge_addressof_1 when creating
2992 bitfield insertions. */
2993 static rtx purge_bitfield_addressof_replacements;
2995 /* List of replacements made below in purge_addressof_1 for patterns
2996 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2997 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2998 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is
2999 not enough in complex cases, e.g. when some field values can be
3000 extracted by using a MEM with a narrower mode. */
3001 static rtx purge_addressof_replacements;
3003 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3004 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3005 the stack. If the function returns FALSE then the replacement could not
3006 be made. If MAY_POSTPONE is true and we would not put the ADDRESSOF
3007 into the stack, postpone processing of the insn. */
3009 static bool
3010 purge_addressof_1 (loc, insn, force, store, may_postpone, ht)
3011 rtx *loc;
3012 rtx insn;
3013 int force, store, may_postpone;
3014 htab_t ht;
3016 rtx x;
3017 RTX_CODE code;
3018 int i, j;
3019 const char *fmt;
3020 bool result = true;
3022 /* Re-start here to avoid recursion in common cases. */
3023 restart:
3025 x = *loc;
3026 if (x == 0)
3027 return true;
3029 code = GET_CODE (x);
3031 /* If we don't return in any of the cases below, we will recurse inside
3032 the RTX, which will normally result in any ADDRESSOF being forced into
3033 memory. */
3034 if (code == SET)
3036 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1,
3037 may_postpone, ht);
3038 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0,
3039 may_postpone, ht);
3040 return result;
3042 else if (code == ADDRESSOF)
3044 rtx sub, insns;
3046 if (GET_CODE (XEXP (x, 0)) != MEM)
3047 put_addressof_into_stack (x, ht);
3049 /* We must create a copy of the rtx because it was created by
3050 overwriting a REG rtx which is always shared. */
3051 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3052 if (validate_change (insn, loc, sub, 0)
3053 || validate_replace_rtx (x, sub, insn))
3054 return true;
3056 start_sequence ();
3058 /* If SUB is a hard or virtual register, try it as a pseudo-register.
3059 Otherwise, perhaps SUB is an expression, so generate code to compute
3060 it. */
3061 if (GET_CODE (sub) == REG && REGNO (sub) <= LAST_VIRTUAL_REGISTER)
3062 sub = copy_to_reg (sub);
3063 else
3064 sub = force_operand (sub, NULL_RTX);
3066 if (! validate_change (insn, loc, sub, 0)
3067 && ! validate_replace_rtx (x, sub, insn))
3068 abort ();
3070 insns = get_insns ();
3071 end_sequence ();
3072 emit_insn_before (insns, insn);
3073 return true;
3076 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3078 rtx sub = XEXP (XEXP (x, 0), 0);
3080 if (GET_CODE (sub) == MEM)
3081 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3082 else if (GET_CODE (sub) == REG
3083 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3085 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3087 int size_x, size_sub;
3089 if (may_postpone)
3091 /* Postpone for now, so that we do not emit bit-field arithmetic
3092 unless there is some benefit from it. */
3093 if (!postponed_insns || XEXP (postponed_insns, 0) != insn)
3094 postponed_insns = alloc_INSN_LIST (insn, postponed_insns);
3095 return true;
3098 if (!insn)
3100 /* When processing REG_NOTES look at the list of
3101 replacements done on the insn to find the register that X
3102 was replaced by. */
3103 rtx tem;
3105 for (tem = purge_bitfield_addressof_replacements;
3106 tem != NULL_RTX;
3107 tem = XEXP (XEXP (tem, 1), 1))
3108 if (rtx_equal_p (x, XEXP (tem, 0)))
3110 *loc = XEXP (XEXP (tem, 1), 0);
3111 return true;
3114 /* See comment for purge_addressof_replacements. */
3115 for (tem = purge_addressof_replacements;
3116 tem != NULL_RTX;
3117 tem = XEXP (XEXP (tem, 1), 1))
3118 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3120 rtx z = XEXP (XEXP (tem, 1), 0);
3122 if (GET_MODE (x) == GET_MODE (z)
3123 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3124 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3125 abort ();
3127 /* It can happen that the note may speak of things
3128 in a wider (or just different) mode than the
3129 code did. This is especially true of
3130 REG_RETVAL. */
3132 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3133 z = SUBREG_REG (z);
3135 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3136 && (GET_MODE_SIZE (GET_MODE (x))
3137 > GET_MODE_SIZE (GET_MODE (z))))
3139 /* This can occur as a result of invalid
3140 pointer casts, e.g. float f; ...
3141 *(long long int *)&f.
3142 ??? We could emit a warning here, but
3143 without a line number that wouldn't be
3144 very helpful. */
3145 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3147 else
3148 z = gen_lowpart (GET_MODE (x), z);
3150 *loc = z;
3151 return true;
3154 /* When we are processing the REG_NOTES of the last instruction
3155 of a libcall, there will typically be no replacements
3156 for that insn; the replacements happened before, in piecemeal
3157 fashion. OTOH we are not interested in the details of
3158 this for the REG_EQUAL note, we want to know the big picture,
3159 which can be succinctly described with a simple SUBREG.
3160 Note that removing the REG_EQUAL note is not an option
3161 on the last insn of a libcall, so we must do a replacement. */
3162 if (! purge_addressof_replacements
3163 && ! purge_bitfield_addressof_replacements)
3165 /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
3166 we got
3167 (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
3168 [0 S8 A32]), which can be expressed with a simple
3169 same-size subreg */
3170 if ((GET_MODE_SIZE (GET_MODE (x))
3171 == GET_MODE_SIZE (GET_MODE (sub)))
3172 /* Again, invalid pointer casts (as in
3173 compile/990203-1.c) can require paradoxical
3174 subregs. */
3175 || (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3176 && (GET_MODE_SIZE (GET_MODE (x))
3177 > GET_MODE_SIZE (GET_MODE (sub)))))
3179 *loc = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
3180 return true;
3182 /* ??? Are there other cases we should handle? */
3184 /* Sometimes we may not be able to find the replacement. For
3185 example when the original insn was a MEM in a wider mode,
3186 and the note is part of a sign extension of a narrowed
3187 version of that MEM. GCC testcase compile/990829-1.c can
3188 generate an example of this situation. Rather than complain
3189 we return false, which will prompt our caller to remove the
3190 offending note. */
3191 return false;
3194 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3195 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3197 /* Do not frob unchanging MEMs. If a later reference forces the
3198 pseudo to the stack, we can wind up with multiple writes to
3199 an unchanging memory, which is invalid. */
3200 if (RTX_UNCHANGING_P (x) && size_x != size_sub)
3203 /* Don't even consider working with paradoxical subregs,
3204 or the moral equivalent seen here. */
3205 else if (size_x <= size_sub
3206 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3208 /* Do a bitfield insertion to mirror what would happen
3209 in memory. */
3211 rtx val, seq;
3213 if (store)
3215 rtx p = PREV_INSN (insn);
3217 start_sequence ();
3218 val = gen_reg_rtx (GET_MODE (x));
3219 if (! validate_change (insn, loc, val, 0))
3221 /* Discard the current sequence and put the
3222 ADDRESSOF on stack. */
3223 end_sequence ();
3224 goto give_up;
3226 seq = get_insns ();
3227 end_sequence ();
3228 emit_insn_before (seq, insn);
3229 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3230 insn, ht);
3232 start_sequence ();
3233 store_bit_field (sub, size_x, 0, GET_MODE (x),
3234 val, GET_MODE_SIZE (GET_MODE (sub)));
3236 /* Make sure to unshare any shared rtl that store_bit_field
3237 might have created. */
3238 unshare_all_rtl_again (get_insns ());
3240 seq = get_insns ();
3241 end_sequence ();
3242 p = emit_insn_after (seq, insn);
3243 if (NEXT_INSN (insn))
3244 compute_insns_for_mem (NEXT_INSN (insn),
3245 p ? NEXT_INSN (p) : NULL_RTX,
3246 ht);
3248 else
3250 rtx p = PREV_INSN (insn);
3252 start_sequence ();
3253 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3254 GET_MODE (x), GET_MODE (x),
3255 GET_MODE_SIZE (GET_MODE (sub)));
3257 if (! validate_change (insn, loc, val, 0))
3259 /* Discard the current sequence and put the
3260 ADDRESSOF on stack. */
3261 end_sequence ();
3262 goto give_up;
3265 seq = get_insns ();
3266 end_sequence ();
3267 emit_insn_before (seq, insn);
3268 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3269 insn, ht);
3272 /* Remember the replacement so that the same one can be done
3273 on the REG_NOTES. */
3274 purge_bitfield_addressof_replacements
3275 = gen_rtx_EXPR_LIST (VOIDmode, x,
3276 gen_rtx_EXPR_LIST
3277 (VOIDmode, val,
3278 purge_bitfield_addressof_replacements));
3280 /* We replaced with a reg -- all done. */
3281 return true;
3285 else if (validate_change (insn, loc, sub, 0))
3287 /* Remember the replacement so that the same one can be done
3288 on the REG_NOTES. */
3289 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3291 rtx tem;
3293 for (tem = purge_addressof_replacements;
3294 tem != NULL_RTX;
3295 tem = XEXP (XEXP (tem, 1), 1))
3296 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3298 XEXP (XEXP (tem, 1), 0) = sub;
3299 return true;
3301 purge_addressof_replacements
3302 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3303 gen_rtx_EXPR_LIST (VOIDmode, sub,
3304 purge_addressof_replacements));
3305 return true;
3307 goto restart;
3311 give_up:
3312 /* Scan all subexpressions. */
3313 fmt = GET_RTX_FORMAT (code);
3314 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3316 if (*fmt == 'e')
3317 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0,
3318 may_postpone, ht);
3319 else if (*fmt == 'E')
3320 for (j = 0; j < XVECLEN (x, i); j++)
3321 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0,
3322 may_postpone, ht);
3325 return result;
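/* A hypothetical example of the easy case above: when the modes
   agree, the MEM/ADDRESSOF indirection simply collapses,

     (mem:SI (addressof:SI (reg:SI 118) 117 DECL))  -->  (reg:SI 118)

   and the pair is recorded in purge_addressof_replacements so the
   same substitution can be replayed later on REG_NOTES.  */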
3328 /* Return a hash value for K, a REG. */
3330 static hashval_t
3331 insns_for_mem_hash (k)
3332 const void * k;
3334 /* Use the address of the key for the hash value. */
3335 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3336 return htab_hash_pointer (m->key);
3339 /* Return nonzero if K1 and K2 (two REGs) are the same. */
3341 static int
3342 insns_for_mem_comp (k1, k2)
3343 const void * k1;
3344 const void * k2;
3346 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3347 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3348 return m1->key == m2->key;
3351 struct insns_for_mem_walk_info
3353 /* The hash table that we are using to record which INSNs use which
3354 MEMs. */
3355 htab_t ht;
3357 /* The INSN we are currently processing. */
3358 rtx insn;
3360 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3361 to find the insns that use the REGs in the ADDRESSOFs. */
3362 int pass;
3365 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3366 that might be used in an ADDRESSOF expression, record this INSN in
3367 the hash table given by DATA (which is really a pointer to an
3368 insns_for_mem_walk_info structure). */
3370 static int
3371 insns_for_mem_walk (r, data)
3372 rtx *r;
3373 void *data;
3375 struct insns_for_mem_walk_info *ifmwi
3376 = (struct insns_for_mem_walk_info *) data;
3377 struct insns_for_mem_entry tmp;
3378 tmp.insns = NULL_RTX;
3380 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3381 && GET_CODE (XEXP (*r, 0)) == REG)
3383 PTR *e;
3384 tmp.key = XEXP (*r, 0);
3385 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3386 if (*e == NULL)
3388 *e = ggc_alloc (sizeof (tmp));
3389 memcpy (*e, &tmp, sizeof (tmp));
3392 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3394 struct insns_for_mem_entry *ifme;
3395 tmp.key = *r;
3396 ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3398 /* If we have not already recorded this INSN, do so now. Since
3399 we process the INSNs in order, we know that if we have
3400 recorded it, it must be at the front of the list. */
3401 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3402 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3403 ifme->insns);
3406 return 0;
3409 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3410 which REGs in HT. */
3412 static void
3413 compute_insns_for_mem (insns, last_insn, ht)
3414 rtx insns;
3415 rtx last_insn;
3416 htab_t ht;
3418 rtx insn;
3419 struct insns_for_mem_walk_info ifmwi;
3420 ifmwi.ht = ht;
3422 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3423 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3424 if (INSN_P (insn))
3426 ifmwi.insn = insn;
3427 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3431 /* Helper function for purge_addressof called through for_each_rtx.
3432 Returns true iff the rtl is an ADDRESSOF. */
3434 static int
3435 is_addressof (rtl, data)
3436 rtx *rtl;
3437 void *data ATTRIBUTE_UNUSED;
3439 return GET_CODE (*rtl) == ADDRESSOF;
3442 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3443 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3444 stack. */
3446 void
3447 purge_addressof (insns)
3448 rtx insns;
3450 rtx insn, tmp;
3451 htab_t ht;
3453 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3454 requires a fixup pass over the instruction stream to correct
3455 INSNs that depended on the REG being a REG, and not a MEM. But,
3456 these fixup passes are slow. Furthermore, most MEMs are not
3457 mentioned in very many instructions. So, we speed up the process
3458 by pre-calculating which REGs occur in which INSNs; that allows
3459 us to perform the fixup passes much more quickly. */
3460 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3461 compute_insns_for_mem (insns, NULL_RTX, ht);
3463 postponed_insns = NULL;
3465 for (insn = insns; insn; insn = NEXT_INSN (insn))
3466 if (INSN_P (insn))
3468 if (! purge_addressof_1 (&PATTERN (insn), insn,
3469 asm_noperands (PATTERN (insn)) > 0, 0, 1, ht))
3470 /* If we could not replace the ADDRESSOFs in the insn,
3471 something is wrong. */
3472 abort ();
3474 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, 0, ht))
3476 /* If we could not replace the ADDRESSOFs in the insn's notes,
3477 we can just remove the offending notes instead. */
3478 rtx note;
3480 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3482 /* If we find a REG_RETVAL note then the insn is a libcall.
3483 Such insns must have REG_EQUAL notes as well, in order
3484 for later passes of the compiler to work. So it is not
3485 safe to delete the notes here, and instead we abort. */
3486 if (REG_NOTE_KIND (note) == REG_RETVAL)
3487 abort ();
3488 if (for_each_rtx (&note, is_addressof, NULL))
3489 remove_note (insn, note);
3494 /* Process the postponed insns. */
3495 while (postponed_insns)
3497 insn = XEXP (postponed_insns, 0);
3498 tmp = postponed_insns;
3499 postponed_insns = XEXP (postponed_insns, 1);
3500 free_INSN_LIST_node (tmp);
3502 if (! purge_addressof_1 (&PATTERN (insn), insn,
3503 asm_noperands (PATTERN (insn)) > 0, 0, 0, ht))
3504 abort ();
3507 /* Clean up. */
3508 purge_bitfield_addressof_replacements = 0;
3509 purge_addressof_replacements = 0;
3511 /* REGs are shared. purge_addressof will destructively replace a REG
3512 with a MEM, which creates shared MEMs.
3514 Unfortunately, the children of put_reg_into_stack assume that MEMs
3515 referring to the same stack slot are shared (fixup_var_refs and
3516 the associated hash table code).
3518 So, we have to do another unsharing pass after we have flushed any
3519 REGs that had their address taken into the stack.
3521 It may be worth tracking whether or not we converted any REGs into
3522 MEMs to avoid this overhead when it is not needed. */
3523 unshare_all_rtl_again (get_insns ());
3526 /* Convert a SET of a hard subreg to a set of the appropriate hard
3527 register. A subroutine of purge_hard_subreg_sets. */
3529 static void
3530 purge_single_hard_subreg_set (pattern)
3531 rtx pattern;
3533 rtx reg = SET_DEST (pattern);
3534 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3535 int offset = 0;
3537 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3538 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3540 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3541 GET_MODE (SUBREG_REG (reg)),
3542 SUBREG_BYTE (reg),
3543 GET_MODE (reg));
3544 reg = SUBREG_REG (reg);
3548 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3550 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3551 SET_DEST (pattern) = reg;
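/* A hypothetical example (32-bit target, little-endian word order,
   hard regs 0 and 1 forming a DImode pair):

     (set (subreg:SI (reg:DI 0) 4) (reg:SI 100))

   has subreg_regno_offset (0, DImode, 4, SImode) == 1, so the
   destination becomes the hard register itself:

     (set (reg:SI 1) (reg:SI 100))  */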
3555 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3556 only such SETs that we expect to see are those left in because
3557 integrate can't handle sets of parts of a return value register.
3559 We don't use alter_subreg because we only want to eliminate subregs
3560 of hard registers. */
3562 void
3563 purge_hard_subreg_sets (insn)
3564 rtx insn;
3566 for (; insn; insn = NEXT_INSN (insn))
3568 if (INSN_P (insn))
3570 rtx pattern = PATTERN (insn);
3571 switch (GET_CODE (pattern))
3573 case SET:
3574 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3575 purge_single_hard_subreg_set (pattern);
3576 break;
3577 case PARALLEL:
3579 int j;
3580 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3582 rtx inner_pattern = XVECEXP (pattern, 0, j);
3583 if (GET_CODE (inner_pattern) == SET
3584 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3585 purge_single_hard_subreg_set (inner_pattern);
3588 break;
3589 default:
3590 break;
3596 /* Pass through the INSNS of function FNDECL and convert virtual register
3597 references to hard register references. */
3599 void
3600 instantiate_virtual_regs (fndecl, insns)
3601 tree fndecl;
3602 rtx insns;
3604 rtx insn;
3605 unsigned int i;
3607 /* Compute the offsets to use for this function. */
3608 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3609 var_offset = STARTING_FRAME_OFFSET;
3610 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3611 out_arg_offset = STACK_POINTER_OFFSET;
3612 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3614 /* Scan all variables and parameters of this function. For each that is
3615 in memory, instantiate all virtual registers if the result is a valid
3616 address. If not, we do it later. That will handle most uses of virtual
3617 regs on many machines. */
3618 instantiate_decls (fndecl, 1);
3620 /* Initialize recognition, indicating that volatile is OK. */
3621 init_recog ();
3623 /* Scan through all the insns, instantiating every virtual register still
3624 present. */
3625 for (insn = insns; insn; insn = NEXT_INSN (insn))
3626 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3627 || GET_CODE (insn) == CALL_INSN)
3629 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3630 if (INSN_DELETED_P (insn))
3631 continue;
3632 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3633 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3634 if (GET_CODE (insn) == CALL_INSN)
3635 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3636 NULL_RTX, 0);
3638 /* Past this point all ASM statements should match. Verify that
3639 to avoid failures later in the compilation process. */
3640 if (asm_noperands (PATTERN (insn)) >= 0
3641 && ! check_asm_operands (PATTERN (insn)))
3642 instantiate_virtual_regs_lossage (insn);
3645 /* Instantiate the stack slots for the parm registers, for later use in
3646 addressof elimination. */
3647 for (i = 0; i < max_parm_reg; ++i)
3648 if (parm_reg_stack_loc[i])
3649 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3651 /* Now instantiate the remaining register equivalences for debugging info.
3652 These will not be valid addresses. */
3653 instantiate_decls (fndecl, 0);
3655 /* Indicate that, from now on, assign_stack_local should use
3656 frame_pointer_rtx. */
3657 virtuals_instantiated = 1;
3660 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3661 all virtual registers in their DECL_RTL's.
3663 If VALID_ONLY, do this only if the resulting address is still valid.
3664 Otherwise, always do it. */
3666 static void
3667 instantiate_decls (fndecl, valid_only)
3668 tree fndecl;
3669 int valid_only;
3671 tree decl;
3673 /* Process all parameters of the function. */
3674 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3676 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3677 HOST_WIDE_INT size_rtl;
3679 instantiate_decl (DECL_RTL (decl), size, valid_only);
3681 /* If the parameter was promoted, then the incoming RTL mode may be
3682 larger than the declared type size. We must use the larger of
3683 the two sizes. */
3684 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3685 size = MAX (size_rtl, size);
3686 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3689 /* Now process all variables defined in the function or its subblocks. */
3690 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3693 /* Subroutine of instantiate_decls: Process all decls in the given
3694 BLOCK node and all its subblocks. */
3696 static void
3697 instantiate_decls_1 (let, valid_only)
3698 tree let;
3699 int valid_only;
3701 tree t;
3703 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3704 if (DECL_RTL_SET_P (t))
3705 instantiate_decl (DECL_RTL (t),
3706 int_size_in_bytes (TREE_TYPE (t)),
3707 valid_only);
3709 /* Process all subblocks. */
3710 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3711 instantiate_decls_1 (t, valid_only);
3714 /* Subroutine of the preceding procedures: Given RTL representing a
3715 decl and the size of the object, do any instantiation required.
3717 If VALID_ONLY is nonzero, it means that the RTL should only be
3718 changed if the new address is valid. */
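/* Added commentary: VALID_ONLY matters because instantiation can push
   an address out of range for the machine's addressing modes, e.g. a
   frame offset that no longer fits in a load instruction's immediate
   field.  In that case the MEM is left untouched here and is fixed up
   later, when the insn that uses it is processed.  */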
3720 static void
3721 instantiate_decl (x, size, valid_only)
3722 rtx x;
3723 HOST_WIDE_INT size;
3724 int valid_only;
3726 enum machine_mode mode;
3727 rtx addr;
3729 /* If this is not a MEM, no need to do anything. Similarly if the
3730 address is a constant or a register that is not a virtual register. */
3732 if (x == 0 || GET_CODE (x) != MEM)
3733 return;
3735 addr = XEXP (x, 0);
3736 if (CONSTANT_P (addr)
3737 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3738 || (GET_CODE (addr) == REG
3739 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3740 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3741 return;
3743 /* If we should only do this if the address is valid, copy the address.
3744 We need to do this so we can undo any changes that might make the
3745 address invalid. This copy is unfortunate, but probably can't be
3746 avoided. */
3748 if (valid_only)
3749 addr = copy_rtx (addr);
3751 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3753 if (valid_only && size >= 0)
3755 unsigned HOST_WIDE_INT decl_size = size;
3757 /* Now verify that the resulting address is valid for every integer or
3758 floating-point mode up to and including SIZE bytes long. We do this
3759 since the object might be accessed in any mode and frame addresses
3760 are shared. */
3762 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3763 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3764 mode = GET_MODE_WIDER_MODE (mode))
3765 if (! memory_address_p (mode, addr))
3766 return;
3768 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3769 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3770 mode = GET_MODE_WIDER_MODE (mode))
3771 if (! memory_address_p (mode, addr))
3772 return;
3775 /* Put back the address now that we have updated it and we either know
3776 it is valid or we don't care whether it is valid. */
3778 XEXP (x, 0) = addr;
3781 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3782 is a virtual register, return the equivalent hard register and set the
3783 offset indirectly through the pointer. Otherwise, return 0. */
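/* Summary of the translation performed below (added commentary):

       virtual_incoming_args_rtx  ->  arg_pointer_rtx    + in_arg_offset
       virtual_stack_vars_rtx     ->  frame_pointer_rtx  + var_offset
       virtual_stack_dynamic_rtx  ->  stack_pointer_rtx  + dynamic_offset
       virtual_outgoing_args_rtx  ->  stack_pointer_rtx  + out_arg_offset
       virtual_cfa_rtx            ->  arg_pointer_rtx    + cfa_offset  */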
3785 static rtx
3786 instantiate_new_reg (x, poffset)
3787 rtx x;
3788 HOST_WIDE_INT *poffset;
3790 rtx new;
3791 HOST_WIDE_INT offset;
3793 if (x == virtual_incoming_args_rtx)
3794 new = arg_pointer_rtx, offset = in_arg_offset;
3795 else if (x == virtual_stack_vars_rtx)
3796 new = frame_pointer_rtx, offset = var_offset;
3797 else if (x == virtual_stack_dynamic_rtx)
3798 new = stack_pointer_rtx, offset = dynamic_offset;
3799 else if (x == virtual_outgoing_args_rtx)
3800 new = stack_pointer_rtx, offset = out_arg_offset;
3801 else if (x == virtual_cfa_rtx)
3802 new = arg_pointer_rtx, offset = cfa_offset;
3803 else
3804 return 0;
3806 *poffset = offset;
3807 return new;
3811 /* Called when instantiate_virtual_regs has failed to update the instruction.
3812    Usually this means that a non-matching instruction has been emitted;
3813    for asm statements the problem may be in the constraints.  */
3814 static void
3815 instantiate_virtual_regs_lossage (insn)
3816 rtx insn;
3818 if (asm_noperands (PATTERN (insn)) >= 0)
3820 error_for_asm (insn, "impossible constraint in `asm'");
3821 delete_insn (insn);
3823 else
3824 abort ();
3826 /* Given a pointer to a piece of rtx and an optional pointer to the
3827 containing object, instantiate any virtual registers present in it.
3829 If EXTRA_INSNS, we always do the replacement and generate
3830    any extra insns before OBJECT.  If it is zero, we do nothing if the
3831    replacement is not valid.
3833 Return 1 if we either had nothing to do or if we were able to do the
3834 needed replacement. Return 0 otherwise; we only return zero if
3835 EXTRA_INSNS is zero.
3837 We first try some simple transformations to avoid the creation of extra
3838 pseudos. */
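/* Illustrative sketch (added commentary): the simple transformations
   mentioned above fold a virtual register directly into an existing
   sum, e.g.

       (plus (reg virtual-stack-vars) (const_int 4))
    -> (plus (reg frame-pointer) (const_int VAR_OFFSET + 4))

   so no new pseudo or extra insn is needed as long as the combined
   constant still yields a valid address.  */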
3840 static int
3841 instantiate_virtual_regs_1 (loc, object, extra_insns)
3842 rtx *loc;
3843 rtx object;
3844 int extra_insns;
3846 rtx x;
3847 RTX_CODE code;
3848 rtx new = 0;
3849 HOST_WIDE_INT offset = 0;
3850 rtx temp;
3851 rtx seq;
3852 int i, j;
3853 const char *fmt;
3855 /* Re-start here to avoid recursion in common cases. */
3856 restart:
3858 x = *loc;
3859 if (x == 0)
3860 return 1;
3862 /* We may have detected and deleted invalid asm statements. */
3863 if (object && INSN_P (object) && INSN_DELETED_P (object))
3864 return 1;
3866 code = GET_CODE (x);
3868 /* Check for some special cases. */
3869 switch (code)
3871 case CONST_INT:
3872 case CONST_DOUBLE:
3873 case CONST_VECTOR:
3874 case CONST:
3875 case SYMBOL_REF:
3876 case CODE_LABEL:
3877 case PC:
3878 case CC0:
3879 case ASM_INPUT:
3880 case ADDR_VEC:
3881 case ADDR_DIFF_VEC:
3882 case RETURN:
3883 return 1;
3885 case SET:
3886 /* We are allowed to set the virtual registers. This means that
3887 the actual register should receive the source minus the
3888 appropriate offset. This is used, for example, in the handling
3889 of non-local gotos. */
3890 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3892 rtx src = SET_SRC (x);
3894 /* We are setting the register, not using it, so the relevant
3895 offset is the negative of the offset to use were we using
3896 the register. */
3897 offset = - offset;
3898 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3900 /* The only valid sources here are PLUS or REG. Just do
3901 the simplest possible thing to handle them. */
3902 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3904 instantiate_virtual_regs_lossage (object);
3905 return 1;
3908 start_sequence ();
3909 if (GET_CODE (src) != REG)
3910 temp = force_operand (src, NULL_RTX);
3911 else
3912 temp = src;
3913 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3914 seq = get_insns ();
3915 end_sequence ();
3917 emit_insn_before (seq, object);
3918 SET_DEST (x) = new;
3920 if (! validate_change (object, &SET_SRC (x), temp, 0)
3921 || ! extra_insns)
3922 instantiate_virtual_regs_lossage (object);
3924 return 1;
3927 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3928 loc = &SET_SRC (x);
3929 goto restart;
3931 case PLUS:
3932 /* Handle special case of virtual register plus constant. */
3933 if (CONSTANT_P (XEXP (x, 1)))
3935 rtx old, new_offset;
3937 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3938 if (GET_CODE (XEXP (x, 0)) == PLUS)
3940 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3942 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3943 extra_insns);
3944 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3946 else
3948 loc = &XEXP (x, 0);
3949 goto restart;
3953 #ifdef POINTERS_EXTEND_UNSIGNED
3954 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3955 we can commute the PLUS and SUBREG because pointers into the
3956 frame are well-behaved. */
3957 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3958 && GET_CODE (XEXP (x, 1)) == CONST_INT
3959 && 0 != (new
3960 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3961 &offset))
3962 && validate_change (object, loc,
3963 plus_constant (gen_lowpart (ptr_mode,
3964 new),
3965 offset
3966 + INTVAL (XEXP (x, 1))),
3968 return 1;
3969 #endif
3970 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3972 /* We know the second operand is a constant. Unless the
3973 first operand is a REG (which has been already checked),
3974 it needs to be checked. */
3975 if (GET_CODE (XEXP (x, 0)) != REG)
3977 loc = &XEXP (x, 0);
3978 goto restart;
3980 return 1;
3983 new_offset = plus_constant (XEXP (x, 1), offset);
3985 /* If the new constant is zero, try to replace the sum with just
3986 the register. */
3987 if (new_offset == const0_rtx
3988 && validate_change (object, loc, new, 0))
3989 return 1;
3991 /* Next try to replace the register and new offset.
3992 There are two changes to validate here and we can't assume that
3993     when the old offset equals the new one, just changing the register
3994 will yield a valid insn. In the interests of a little efficiency,
3995 however, we only call validate change once (we don't queue up the
3996 changes and then call apply_change_group). */
3998 old = XEXP (x, 0);
3999 if (offset == 0
4000 ? ! validate_change (object, &XEXP (x, 0), new, 0)
4001 : (XEXP (x, 0) = new,
4002 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
4004 if (! extra_insns)
4006 XEXP (x, 0) = old;
4007 return 0;
4010 /* Otherwise copy the new constant into a register and replace
4011     the constant with that register.  */
4012 temp = gen_reg_rtx (Pmode);
4013 XEXP (x, 0) = new;
4014 if (validate_change (object, &XEXP (x, 1), temp, 0))
4015 emit_insn_before (gen_move_insn (temp, new_offset), object);
4016 else
4018 /* If that didn't work, replace this expression with a
4019 register containing the sum. */
4021 XEXP (x, 0) = old;
4022 new = gen_rtx_PLUS (Pmode, new, new_offset);
4024 start_sequence ();
4025 temp = force_operand (new, NULL_RTX);
4026 seq = get_insns ();
4027 end_sequence ();
4029 emit_insn_before (seq, object);
4030 if (! validate_change (object, loc, temp, 0)
4031 && ! validate_replace_rtx (x, temp, object))
4033 instantiate_virtual_regs_lossage (object);
4034 return 1;
4039 return 1;
4042 /* Fall through to generic two-operand expression case. */
4043 case EXPR_LIST:
4044 case CALL:
4045 case COMPARE:
4046 case MINUS:
4047 case MULT:
4048 case DIV: case UDIV:
4049 case MOD: case UMOD:
4050 case AND: case IOR: case XOR:
4051 case ROTATERT: case ROTATE:
4052 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
4053 case NE: case EQ:
4054 case GE: case GT: case GEU: case GTU:
4055 case LE: case LT: case LEU: case LTU:
4056 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
4057 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
4058 loc = &XEXP (x, 0);
4059 goto restart;
4061 case MEM:
4062 /* Most cases of MEM that convert to valid addresses have already been
4063 handled by our scan of decls. The only special handling we
4064 need here is to make a copy of the rtx to ensure it isn't being
4065 shared if we have to change it to a pseudo.
4067 If the rtx is a simple reference to an address via a virtual register,
4068 it can potentially be shared. In such cases, first try to make it
4069 a valid address, which can also be shared. Otherwise, copy it and
4070 proceed normally.
4072 First check for common cases that need no processing. These are
4073 usually due to instantiation already being done on a previous instance
4074 of a shared rtx. */
4076 temp = XEXP (x, 0);
4077 if (CONSTANT_ADDRESS_P (temp)
4078 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4079 || temp == arg_pointer_rtx
4080 #endif
4081 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4082 || temp == hard_frame_pointer_rtx
4083 #endif
4084 || temp == frame_pointer_rtx)
4085 return 1;
4087 if (GET_CODE (temp) == PLUS
4088 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4089 && (XEXP (temp, 0) == frame_pointer_rtx
4090 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4091 || XEXP (temp, 0) == hard_frame_pointer_rtx
4092 #endif
4093 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4094 || XEXP (temp, 0) == arg_pointer_rtx
4095 #endif
4097 return 1;
4099 if (temp == virtual_stack_vars_rtx
4100 || temp == virtual_incoming_args_rtx
4101 || (GET_CODE (temp) == PLUS
4102 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4103 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4104 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4106 /* This MEM may be shared. If the substitution can be done without
4107 the need to generate new pseudos, we want to do it in place
4108 so all copies of the shared rtx benefit. The call below will
4109 only make substitutions if the resulting address is still
4110 valid.
4112 Note that we cannot pass X as the object in the recursive call
4113 since the insn being processed may not allow all valid
4114      addresses.  However, if we were not passed an object, we can
4115 only modify X without copying it if X will have a valid
4116 address.
4118 ??? Also note that this can still lose if OBJECT is an insn that
4119      has fewer restrictions on an address than some other insn.
4120 In that case, we will modify the shared address. This case
4121 doesn't seem very likely, though. One case where this could
4122      happen is with a USE or CLOBBER reference, but we
4123 take care of that below. */
4125 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4126 object ? object : x, 0))
4127 return 1;
4129 /* Otherwise make a copy and process that copy. We copy the entire
4130 RTL expression since it might be a PLUS which could also be
4131 shared. */
4132 *loc = x = copy_rtx (x);
4135 /* Fall through to generic unary operation case. */
4136 case PREFETCH:
4137 case SUBREG:
4138 case STRICT_LOW_PART:
4139 case NEG: case NOT:
4140 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4141 case SIGN_EXTEND: case ZERO_EXTEND:
4142 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4143 case FLOAT: case FIX:
4144 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4145 case ABS:
4146 case SQRT:
4147 case FFS:
4148 case CLZ: case CTZ:
4149 case POPCOUNT: case PARITY:
4150      /* These cases either have just one operand or we know that we need not
4151 check the rest of the operands. */
4152 loc = &XEXP (x, 0);
4153 goto restart;
4155 case USE:
4156 case CLOBBER:
4157 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4158 go ahead and make the invalid one, but do it to a copy. For a REG,
4159 just make the recursive call, since there's no chance of a problem. */
4161 if ((GET_CODE (XEXP (x, 0)) == MEM
4162 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4164 || (GET_CODE (XEXP (x, 0)) == REG
4165 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4166 return 1;
4168 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4169 loc = &XEXP (x, 0);
4170 goto restart;
4172 case REG:
4173 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4174 in front of this insn and substitute the temporary. */
4175 if ((new = instantiate_new_reg (x, &offset)) != 0)
4177 temp = plus_constant (new, offset);
4178 if (!validate_change (object, loc, temp, 0))
4180 if (! extra_insns)
4181 return 0;
4183 start_sequence ();
4184 temp = force_operand (temp, NULL_RTX);
4185 seq = get_insns ();
4186 end_sequence ();
4188 emit_insn_before (seq, object);
4189 if (! validate_change (object, loc, temp, 0)
4190 && ! validate_replace_rtx (x, temp, object))
4191 instantiate_virtual_regs_lossage (object);
4195 return 1;
4197 case ADDRESSOF:
4198 if (GET_CODE (XEXP (x, 0)) == REG)
4199 return 1;
4201 else if (GET_CODE (XEXP (x, 0)) == MEM)
4203 /* If we have a (addressof (mem ..)), do any instantiation inside
4204 since we know we'll be making the inside valid when we finally
4205 remove the ADDRESSOF. */
4206 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4207 return 1;
4209 break;
4211 default:
4212 break;
4215 /* Scan all subexpressions. */
4216 fmt = GET_RTX_FORMAT (code);
4217 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4218 if (*fmt == 'e')
4220 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4221 return 0;
4223 else if (*fmt == 'E')
4224 for (j = 0; j < XVECLEN (x, i); j++)
4225 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4226 extra_insns))
4227 return 0;
4229 return 1;
4232 /* Optimization: assuming this function does not receive nonlocal gotos,
4233    delete the handlers for such gotos, as well as the insns to establish
4234 and disestablish them. */
4236 static void
4237 delete_handlers ()
4239 rtx insn;
4240 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4242 /* Delete the handler by turning off the flag that would
4243 prevent jump_optimize from deleting it.
4244 Also permit deletion of the nonlocal labels themselves
4245 if nothing local refers to them. */
4246 if (GET_CODE (insn) == CODE_LABEL)
4248 tree t, last_t;
4250 LABEL_PRESERVE_P (insn) = 0;
4252 /* Remove it from the nonlocal_label list, to avoid confusing
4253 flow. */
4254 for (t = nonlocal_labels, last_t = 0; t;
4255 last_t = t, t = TREE_CHAIN (t))
4256 if (DECL_RTL (TREE_VALUE (t)) == insn)
4257 break;
4258 if (t)
4260 if (! last_t)
4261 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4262 else
4263 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4266 if (GET_CODE (insn) == INSN)
4268 int can_delete = 0;
4269 rtx t;
4270 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4271 if (reg_mentioned_p (t, PATTERN (insn)))
4273 can_delete = 1;
4274 break;
4276 if (can_delete
4277 || (nonlocal_goto_stack_level != 0
4278 && reg_mentioned_p (nonlocal_goto_stack_level,
4279 PATTERN (insn))))
4280 delete_related_insns (insn);
4285 /* Return the first insn following those generated by `assign_parms'. */
4287 rtx
4288 get_first_nonparm_insn ()
4290 if (last_parm_insn)
4291 return NEXT_INSN (last_parm_insn);
4292 return get_insns ();
4295 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4296 This means a type for which function calls must pass an address to the
4297 function or get an address back from the function.
4298 EXP may be a type node or an expression (whose type is tested). */
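/* Example (added commentary): on most targets a `struct { char c[64]; }'
   must be returned through a buffer whose address the caller passes, so
   it is an aggregate value here, while a plain `int' comes back in a
   register and is not.  The exact answer is target-dependent, as the
   RETURN_IN_MEMORY and call-used-register checks below show.  */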
4300 int
4301 aggregate_value_p (exp)
4302 tree exp;
4304 int i, regno, nregs;
4305 rtx reg;
4307 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4309 if (TREE_CODE (type) == VOID_TYPE)
4310 return 0;
4311 if (RETURN_IN_MEMORY (type))
4312 return 1;
4313 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4314 and thus can't be returned in registers. */
4315 if (TREE_ADDRESSABLE (type))
4316 return 1;
4317 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4318 return 1;
4319 /* Make sure we have suitable call-clobbered regs to return
4320 the value in; if not, we must return it in memory. */
4321 reg = hard_function_value (type, 0, 0);
4323 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4324 it is OK. */
4325 if (GET_CODE (reg) != REG)
4326 return 0;
4328 regno = REGNO (reg);
4329 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4330 for (i = 0; i < nregs; i++)
4331 if (! call_used_regs[regno + i])
4332 return 1;
4333 return 0;
4336 /* Assign RTL expressions to the function's parameters.
4337 This may involve copying them into registers and using
4338 those registers as the RTL for them. */
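/* Rough outline of what follows (added commentary): for each parm we
   (1) ask the target where it arrives, in a register or on the stack,
   via FUNCTION_ARG or FUNCTION_INCOMING_ARG, (2) compute its stack home
   with locate_and_pad_parm, and (3) emit whatever moves and mode
   conversions are needed to give the parm the DECL_RTL used by the
   function body.  */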
4340 void
4341 assign_parms (fndecl)
4342 tree fndecl;
4344 tree parm;
4345 CUMULATIVE_ARGS args_so_far;
4346 /* Total space needed so far for args on the stack,
4347 given as a constant and a tree-expression. */
4348 struct args_size stack_args_size;
4349 tree fntype = TREE_TYPE (fndecl);
4350 tree fnargs = DECL_ARGUMENTS (fndecl), orig_fnargs;
4351 /* This is used for the arg pointer when referring to stack args. */
4352 rtx internal_arg_pointer;
4353 /* This is a dummy PARM_DECL that we used for the function result if
4354 the function returns a structure. */
4355 tree function_result_decl = 0;
4356 #ifdef SETUP_INCOMING_VARARGS
4357 int varargs_setup = 0;
4358 #endif
4359 int reg_parm_stack_space = 0;
4360 rtx conversion_insns = 0;
4362 /* Nonzero if function takes extra anonymous args.
4363 This means the last named arg must be on the stack
4364 right before the anonymous ones. */
4365 int stdarg
4366 = (TYPE_ARG_TYPES (fntype) != 0
4367 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4368 != void_type_node));
4370 current_function_stdarg = stdarg;
4372 /* If the reg that the virtual arg pointer will be translated into is
4373 not a fixed reg or is the stack pointer, make a copy of the virtual
4374 arg pointer, and address parms via the copy. The frame pointer is
4375 considered fixed even though it is not marked as such.
4377 The second time through, simply use ap to avoid generating rtx. */
4379 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4380 || ! (fixed_regs[ARG_POINTER_REGNUM]
4381 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4382 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4383 else
4384 internal_arg_pointer = virtual_incoming_args_rtx;
4385 current_function_internal_arg_pointer = internal_arg_pointer;
4387 stack_args_size.constant = 0;
4388 stack_args_size.var = 0;
4390 /* If struct value address is treated as the first argument, make it so. */
4391 if (aggregate_value_p (DECL_RESULT (fndecl))
4392 && ! current_function_returns_pcc_struct
4393 && struct_value_incoming_rtx == 0)
4395 tree type = build_pointer_type (TREE_TYPE (fntype));
4397 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4399 DECL_ARG_TYPE (function_result_decl) = type;
4400 TREE_CHAIN (function_result_decl) = fnargs;
4401 fnargs = function_result_decl;
4404 orig_fnargs = fnargs;
4406 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4407 parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4409 if (SPLIT_COMPLEX_ARGS)
4410 fnargs = split_complex_args (fnargs);
4412 #ifdef REG_PARM_STACK_SPACE
4413 #ifdef MAYBE_REG_PARM_STACK_SPACE
4414 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4415 #else
4416 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4417 #endif
4418 #endif
4420 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4421 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4422 #else
4423 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl);
4424 #endif
4426 /* We haven't yet found an argument that we must push and pretend the
4427 caller did. */
4428 current_function_pretend_args_size = 0;
4430 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4432 rtx entry_parm;
4433 rtx stack_parm;
4434 enum machine_mode promoted_mode, passed_mode;
4435 enum machine_mode nominal_mode, promoted_nominal_mode;
4436 int unsignedp;
4437 struct locate_and_pad_arg_data locate;
4438 int passed_pointer = 0;
4439 int did_conversion = 0;
4440 tree passed_type = DECL_ARG_TYPE (parm);
4441 tree nominal_type = TREE_TYPE (parm);
4442 int last_named = 0, named_arg;
4443 int in_regs;
4444 int partial = 0;
4446 /* Set LAST_NAMED if this is last named arg before last
4447 anonymous args. */
4448 if (stdarg)
4450 tree tem;
4452 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4453 if (DECL_NAME (tem))
4454 break;
4456 if (tem == 0)
4457 last_named = 1;
4459 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4460 most machines, if this is a varargs/stdarg function, then we treat
4461 the last named arg as if it were anonymous too. */
4462 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4464 if (TREE_TYPE (parm) == error_mark_node
4465 /* This can happen after weird syntax errors
4466 or if an enum type is defined among the parms. */
4467 || TREE_CODE (parm) != PARM_DECL
4468 || passed_type == NULL)
4470 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4471 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4472 TREE_USED (parm) = 1;
4473 continue;
4476 /* Find mode of arg as it is passed, and mode of arg
4477 as it should be during execution of this function. */
4478 passed_mode = TYPE_MODE (passed_type);
4479 nominal_mode = TYPE_MODE (nominal_type);
4481 /* If the parm's mode is VOID, its value doesn't matter,
4482     so avoid the usual things like emit_move_insn that could crash.  */
4483 if (nominal_mode == VOIDmode)
4485 SET_DECL_RTL (parm, const0_rtx);
4486 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4487 continue;
4490 /* If the parm is to be passed as a transparent union, use the
4491 type of the first field for the tests below. We have already
4492 verified that the modes are the same. */
4493 if (DECL_TRANSPARENT_UNION (parm)
4494 || (TREE_CODE (passed_type) == UNION_TYPE
4495 && TYPE_TRANSPARENT_UNION (passed_type)))
4496 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4498 /* See if this arg was passed by invisible reference. It is if
4499 it is an object whose size depends on the contents of the
4500 object itself or if the machine requires these objects be passed
4501 that way. */
4503 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (passed_type))
4504 || TREE_ADDRESSABLE (passed_type)
4505 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4506 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4507 passed_type, named_arg)
4508 #endif
4511 passed_type = nominal_type = build_pointer_type (passed_type);
4512 passed_pointer = 1;
4513 passed_mode = nominal_mode = Pmode;
4515 /* See if the frontend wants to pass this by invisible reference. */
4516 else if (passed_type != nominal_type
4517 && POINTER_TYPE_P (passed_type)
4518 && TREE_TYPE (passed_type) == nominal_type)
4520 nominal_type = passed_type;
4521 passed_pointer = 1;
4522 passed_mode = nominal_mode = Pmode;
4525 promoted_mode = passed_mode;
4527 #ifdef PROMOTE_FUNCTION_ARGS
4528       /* Compute the mode to which the arg is actually extended.  */
4529 unsignedp = TREE_UNSIGNED (passed_type);
4530 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4531 #endif
4533 /* Let machine desc say which reg (if any) the parm arrives in.
4534 0 means it arrives on the stack. */
4535 #ifdef FUNCTION_INCOMING_ARG
4536 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4537 passed_type, named_arg);
4538 #else
4539 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4540 passed_type, named_arg);
4541 #endif
4543 if (entry_parm == 0)
4544 promoted_mode = passed_mode;
4546 #ifdef SETUP_INCOMING_VARARGS
4547 /* If this is the last named parameter, do any required setup for
4548 varargs or stdargs. We need to know about the case of this being an
4549 addressable type, in which case we skip the registers it
4550 would have arrived in.
4552 For stdargs, LAST_NAMED will be set for two parameters, the one that
4553 is actually the last named, and the dummy parameter. We only
4554 want to do this action once.
4556 Also, indicate when RTL generation is to be suppressed. */
4557 if (last_named && !varargs_setup)
4559 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4560 current_function_pretend_args_size, 0);
4561 varargs_setup = 1;
4563 #endif
4565 /* Determine parm's home in the stack,
4566 in case it arrives in the stack or we should pretend it did.
4568 Compute the stack position and rtx where the argument arrives
4569 and its size.
4571 There is one complexity here: If this was a parameter that would
4572     have been passed in registers, but wasn't, only because it is
4573 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4574 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4575 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4576 0 as it was the previous time. */
4577 in_regs = entry_parm != 0;
4578 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4579 in_regs = 1;
4580 #endif
4581 if (!in_regs && !named_arg)
4583 int pretend_named = PRETEND_OUTGOING_VARARGS_NAMED;
4584 if (pretend_named)
4586 #ifdef FUNCTION_INCOMING_ARG
4587 in_regs = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4588 passed_type,
4589 pretend_named) != 0;
4590 #else
4591 in_regs = FUNCTION_ARG (args_so_far, promoted_mode,
4592 passed_type,
4593 pretend_named) != 0;
4594 #endif
4598 /* If this parameter was passed both in registers and in the stack,
4599 use the copy on the stack. */
4600 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4601 entry_parm = 0;
4603 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4604 if (entry_parm)
4605 partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4606 passed_type, named_arg);
4607 #endif
4609 memset (&locate, 0, sizeof (locate));
4610 locate_and_pad_parm (promoted_mode, passed_type, in_regs,
4611 entry_parm ? partial : 0, fndecl,
4612 &stack_args_size, &locate);
4615 rtx offset_rtx;
4617 /* If we're passing this arg using a reg, make its stack home
4618 the aligned stack slot. */
4619 if (entry_parm)
4620 offset_rtx = ARGS_SIZE_RTX (locate.slot_offset);
4621 else
4622 offset_rtx = ARGS_SIZE_RTX (locate.offset);
4624 if (offset_rtx == const0_rtx)
4625 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4626 else
4627 stack_parm = gen_rtx_MEM (promoted_mode,
4628 gen_rtx_PLUS (Pmode,
4629 internal_arg_pointer,
4630 offset_rtx));
4632 set_mem_attributes (stack_parm, parm, 1);
4634 /* Set also REG_ATTRS if parameter was passed in a register. */
4635 if (entry_parm)
4636 set_reg_attrs_for_parm (entry_parm, stack_parm);
4639 /* If this parm was passed part in regs and part in memory,
4640 pretend it arrived entirely in memory
4641 by pushing the register-part onto the stack.
4643 In the special case of a DImode or DFmode that is split,
4644 we could put it together in a pseudoreg directly,
4645 but for now that's not worth bothering with. */
4647 if (partial)
4649 #ifndef MAYBE_REG_PARM_STACK_SPACE
4650 /* When REG_PARM_STACK_SPACE is nonzero, stack space for
4651 split parameters was allocated by our caller, so we
4652 won't be pushing it in the prolog. */
4653 if (reg_parm_stack_space == 0)
4654 #endif
4655 current_function_pretend_args_size
4656 = (((partial * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4657 / (PARM_BOUNDARY / BITS_PER_UNIT)
4658 * (PARM_BOUNDARY / BITS_PER_UNIT));
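/* Worked example (added commentary): with PARTIAL == 3 words,
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 bits, the register part
   occupies 12 bytes, which the expression above rounds up to the next
   8-byte parm boundary: (12 + 8 - 1) / 8 * 8 == 16 bytes of pretend
   args.  */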
4660 /* Handle calls that pass values in multiple non-contiguous
4661 locations. The Irix 6 ABI has examples of this. */
4662 if (GET_CODE (entry_parm) == PARALLEL)
4663 emit_group_store (validize_mem (stack_parm), entry_parm,
4664 int_size_in_bytes (TREE_TYPE (parm)));
4666 else
4667 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
4668 partial);
4670 entry_parm = stack_parm;
4673 /* If we didn't decide this parm came in a register,
4674 by default it came on the stack. */
4675 if (entry_parm == 0)
4676 entry_parm = stack_parm;
4678 /* Record permanently how this parm was passed. */
4679 DECL_INCOMING_RTL (parm) = entry_parm;
4681 /* If there is actually space on the stack for this parm,
4682 count it in stack_args_size; otherwise set stack_parm to 0
4683 to indicate there is no preallocated stack slot for the parm. */
4685 if (entry_parm == stack_parm
4686 || (GET_CODE (entry_parm) == PARALLEL
4687 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4688 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4689 /* On some machines, even if a parm value arrives in a register
4690 there is still an (uninitialized) stack slot allocated for it.
4692 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4693 whether this parameter already has a stack slot allocated,
4694 because an arg block exists only if current_function_args_size
4695 is larger than some threshold, and we haven't calculated that
4696 yet. So, for now, we just assume that stack slots never exist
4697 in this case. */
4698 || REG_PARM_STACK_SPACE (fndecl) > 0
4699 #endif
4702 stack_args_size.constant += locate.size.constant;
4703 /* locate.size doesn't include the part in regs. */
4704 if (partial)
4705 stack_args_size.constant += current_function_pretend_args_size;
4706 if (locate.size.var)
4707 ADD_PARM_SIZE (stack_args_size, locate.size.var);
4709 else
4710 /* No stack slot was pushed for this parm. */
4711 stack_parm = 0;
4713 /* Update info on where next arg arrives in registers. */
4715 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4716 passed_type, named_arg);
4718 /* If we can't trust the parm stack slot to be aligned enough
4719 for its ultimate type, don't use that slot after entry.
4720 We'll make another stack slot, if we need one. */
4722 unsigned int thisparm_boundary
4723 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4725 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4726 stack_parm = 0;
4729 /* If parm was passed in memory, and we need to convert it on entry,
4730 don't store it back in that same slot. */
4731 if (entry_parm == stack_parm
4732 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4733 stack_parm = 0;
4735 /* When an argument is passed in multiple locations, we can't
4736 make use of this information, but we can save some copying if
4737 the whole argument is passed in a single register. */
4738 if (GET_CODE (entry_parm) == PARALLEL
4739 && nominal_mode != BLKmode && passed_mode != BLKmode)
4741 int i, len = XVECLEN (entry_parm, 0);
4743 for (i = 0; i < len; i++)
4744 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4745 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4746 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4747 == passed_mode)
4748 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4750 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4751 DECL_INCOMING_RTL (parm) = entry_parm;
4752 break;
4756 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4757 in the mode in which it arrives.
4758 STACK_PARM is an RTX for a stack slot where the parameter can live
4759 during the function (in case we want to put it there).
4760 STACK_PARM is 0 if no stack slot was pushed for it.
4762 Now output code if necessary to convert ENTRY_PARM to
4763 the type in which this function declares it,
4764 and store that result in an appropriate place,
4765 which may be a pseudo reg, may be STACK_PARM,
4766 or may be a local stack slot if STACK_PARM is 0.
4768 Set DECL_RTL to that place. */
4770 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4772 /* If a BLKmode arrives in registers, copy it to a stack slot.
4773 Handle calls that pass values in multiple non-contiguous
4774 locations. The Irix 6 ABI has examples of this. */
4775 if (GET_CODE (entry_parm) == REG
4776 || GET_CODE (entry_parm) == PARALLEL)
4778 int size = int_size_in_bytes (TREE_TYPE (parm));
4779 int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
4780 rtx mem;
4782 /* Note that we will be storing an integral number of words.
4783 So we have to be careful to ensure that we allocate an
4784         integral number of words.  We do this below in the call to
4785 assign_stack_local if space was not allocated in the argument
4786 list. If it was, this will not work if PARM_BOUNDARY is not
4787 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4788 if it becomes a problem. */
4790 if (stack_parm == 0)
4792 stack_parm
4793 = assign_stack_local (GET_MODE (entry_parm),
4794 size_stored, 0);
4795 set_mem_attributes (stack_parm, parm, 1);
4798 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4799 abort ();
4801 mem = validize_mem (stack_parm);
4803 /* Handle calls that pass values in multiple non-contiguous
4804 locations. The Irix 6 ABI has examples of this. */
4805 if (GET_CODE (entry_parm) == PARALLEL)
4806 emit_group_store (mem, entry_parm, size);
4808 else if (size == 0)
4811 /* If SIZE is that of a mode no bigger than a word, just use
4812 that mode's store operation. */
4813 else if (size <= UNITS_PER_WORD)
4815 enum machine_mode mode
4816 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
4818 if (mode != BLKmode)
4820 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
4821 emit_move_insn (change_address (mem, mode, 0), reg);
4824 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
4825 machine must be aligned to the left before storing
4826 to memory. Note that the previous test doesn't
4827 handle all cases (e.g. SIZE == 3). */
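/* Worked example (added commentary): with SIZE == 3 and
   UNITS_PER_WORD == 4, the value is shifted left by
   (4 - 3) * BITS_PER_UNIT == 8 bits so that its bytes land at the
   most significant end of the word before the store.  */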
4828 else if (size != UNITS_PER_WORD
4829 && BYTES_BIG_ENDIAN)
4831 rtx tem, x;
4832 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4833 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
4835 x = expand_binop (word_mode, ashl_optab, reg,
4836 GEN_INT (by), 0, 1, OPTAB_WIDEN);
4837 tem = change_address (mem, word_mode, 0);
4838 emit_move_insn (tem, x);
4840 else
4841 move_block_from_reg (REGNO (entry_parm), mem,
4842 size_stored / UNITS_PER_WORD);
4844 else
4845 move_block_from_reg (REGNO (entry_parm), mem,
4846 size_stored / UNITS_PER_WORD);
4848 SET_DECL_RTL (parm, stack_parm);
4850 else if (! ((! optimize
4851 && ! DECL_REGISTER (parm))
4852 || TREE_SIDE_EFFECTS (parm)
4853 /* If -ffloat-store specified, don't put explicit
4854 float variables into registers. */
4855 || (flag_float_store
4856 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4857 /* Always assign pseudo to structure return or item passed
4858 by invisible reference. */
4859 || passed_pointer || parm == function_result_decl)
4861 /* Store the parm in a pseudoregister during the function, but we
4862 may need to do it in a wider mode. */
4864 rtx parmreg;
4865 unsigned int regno, regnoi = 0, regnor = 0;
4867 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4869 promoted_nominal_mode
4870 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4872 parmreg = gen_reg_rtx (promoted_nominal_mode);
4873 mark_user_reg (parmreg);
4875 /* If this was an item that we received a pointer to, set DECL_RTL
4876 appropriately. */
4877 if (passed_pointer)
4879 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4880 parmreg);
4881 set_mem_attributes (x, parm, 1);
4882 SET_DECL_RTL (parm, x);
4884 else
4886 SET_DECL_RTL (parm, parmreg);
4887 maybe_set_unchanging (DECL_RTL (parm), parm);
4890 /* Copy the value into the register. */
4891 if (nominal_mode != passed_mode
4892 || promoted_nominal_mode != promoted_mode)
4894 int save_tree_used;
4895 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4896 mode, by the caller. We now have to convert it to
4897 NOMINAL_MODE, if different. However, PARMREG may be in
4898 a different mode than NOMINAL_MODE if it is being stored
4899 promoted.
4901 If ENTRY_PARM is a hard register, it might be in a register
4902 not valid for operating in its mode (e.g., an odd-numbered
4903 register for a DFmode). In that case, moves are the only
4904 thing valid, so we can't do a convert from there. This
4905       occurs when the calling sequence allows such misaligned
4906       usage.
4908 In addition, the conversion may involve a call, which could
4909 clobber parameters which haven't been copied to pseudo
4910 registers yet. Therefore, we must first copy the parm to
4911 a pseudo reg here, and save the conversion until after all
4912 parameters have been moved. */
4914 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4916 emit_move_insn (tempreg, validize_mem (entry_parm));
4918 push_to_sequence (conversion_insns);
4919 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4921 if (GET_CODE (tempreg) == SUBREG
4922 && GET_MODE (tempreg) == nominal_mode
4923 && GET_CODE (SUBREG_REG (tempreg)) == REG
4924 && nominal_mode == passed_mode
4925 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4926 && GET_MODE_SIZE (GET_MODE (tempreg))
4927 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4929 /* The argument is already sign/zero extended, so note it
4930 into the subreg. */
4931 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4932 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4935 /* TREE_USED gets set erroneously during expand_assignment. */
4936 save_tree_used = TREE_USED (parm);
4937 expand_assignment (parm,
4938 make_tree (nominal_type, tempreg), 0, 0);
4939 TREE_USED (parm) = save_tree_used;
4940 conversion_insns = get_insns ();
4941 did_conversion = 1;
4942 end_sequence ();
4944 else
4945 emit_move_insn (parmreg, validize_mem (entry_parm));
4947 /* If we were passed a pointer but the actual value
4948 can safely live in a register, put it in one. */
4949 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4950 /* If by-reference argument was promoted, demote it. */
4951 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4952 || ! ((! optimize
4953 && ! DECL_REGISTER (parm))
4954 || TREE_SIDE_EFFECTS (parm)
4955 /* If -ffloat-store specified, don't put explicit
4956 float variables into registers. */
4957 || (flag_float_store
4958 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4960 /* We can't use nominal_mode, because it will have been set to
4961 Pmode above. We must use the actual mode of the parm. */
4962 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4963 mark_user_reg (parmreg);
4964 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4966 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4967 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4968 push_to_sequence (conversion_insns);
4969 emit_move_insn (tempreg, DECL_RTL (parm));
4970 SET_DECL_RTL (parm,
4971 convert_to_mode (GET_MODE (parmreg),
4972 tempreg,
4973 unsigned_p));
4974 emit_move_insn (parmreg, DECL_RTL (parm));
4975 conversion_insns = get_insns();
4976 did_conversion = 1;
4977 end_sequence ();
4979 else
4980 emit_move_insn (parmreg, DECL_RTL (parm));
4981 SET_DECL_RTL (parm, parmreg);
4982 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4983 now the parm. */
4984 stack_parm = 0;
4986 #ifdef FUNCTION_ARG_CALLEE_COPIES
4987 /* If we are passed an arg by reference and it is our responsibility
4988 to make a copy, do it now.
4989     PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4990 original argument, so we must recreate them in the call to
4991 FUNCTION_ARG_CALLEE_COPIES. */
4992 /* ??? Later add code to handle the case that if the argument isn't
4993 modified, don't do the copy. */
4995 else if (passed_pointer
4996 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4997 TYPE_MODE (DECL_ARG_TYPE (parm)),
4998 DECL_ARG_TYPE (parm),
4999 named_arg)
5000 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
5002 rtx copy;
5003 tree type = DECL_ARG_TYPE (parm);
5005 /* This sequence may involve a library call perhaps clobbering
5006 registers that haven't been copied to pseudos yet. */
5008 push_to_sequence (conversion_insns);
5010 if (!COMPLETE_TYPE_P (type)
5011 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5012 /* This is a variable sized object. */
5013 copy = gen_rtx_MEM (BLKmode,
5014 allocate_dynamic_stack_space
5015 (expr_size (parm), NULL_RTX,
5016 TYPE_ALIGN (type)));
5017 else
5018 copy = assign_stack_temp (TYPE_MODE (type),
5019 int_size_in_bytes (type), 1);
5020 set_mem_attributes (copy, parm, 1);
5022 store_expr (parm, copy, 0);
5023 emit_move_insn (parmreg, XEXP (copy, 0));
5024 conversion_insns = get_insns ();
5025 did_conversion = 1;
5026 end_sequence ();
5028 #endif /* FUNCTION_ARG_CALLEE_COPIES */
5030 /* In any case, record the parm's desired stack location
5031 in case we later discover it must live in the stack.
5033 If it is a COMPLEX value, store the stack location for both
5034 halves. */
5036 if (GET_CODE (parmreg) == CONCAT)
5037 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
5038 else
5039 regno = REGNO (parmreg);
5041 if (regno >= max_parm_reg)
5043 rtx *new;
5044 int old_max_parm_reg = max_parm_reg;
5046 /* It's slow to expand this one register at a time,
5047 but it's also rare and we need max_parm_reg to be
5048 precisely correct. */
5049 max_parm_reg = regno + 1;
5050 new = (rtx *) ggc_realloc (parm_reg_stack_loc,
5051 max_parm_reg * sizeof (rtx));
5052 memset ((char *) (new + old_max_parm_reg), 0,
5053 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
5054 parm_reg_stack_loc = new;
5057 if (GET_CODE (parmreg) == CONCAT)
5059 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
5061 regnor = REGNO (gen_realpart (submode, parmreg));
5062 regnoi = REGNO (gen_imagpart (submode, parmreg));
5064 if (stack_parm != 0)
5066 parm_reg_stack_loc[regnor]
5067 = gen_realpart (submode, stack_parm);
5068 parm_reg_stack_loc[regnoi]
5069 = gen_imagpart (submode, stack_parm);
5071 else
5073 parm_reg_stack_loc[regnor] = 0;
5074 parm_reg_stack_loc[regnoi] = 0;
5077 else
5078 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
5080 /* Mark the register as eliminable if we did no conversion
5081 and it was copied from memory at a fixed offset,
5082 and the arg pointer was not copied to a pseudo-reg.
5083 If the arg pointer is a pseudo reg or the offset formed
5084 an invalid address, such memory-equivalences
5085 as we make here would screw up life analysis for it. */
5086 if (nominal_mode == passed_mode
5087 && ! did_conversion
5088 && stack_parm != 0
5089 && GET_CODE (stack_parm) == MEM
5090 && locate.offset.var == 0
5091 && reg_mentioned_p (virtual_incoming_args_rtx,
5092 XEXP (stack_parm, 0)))
5094 rtx linsn = get_last_insn ();
5095 rtx sinsn, set;
5097 /* Mark complex types separately. */
5098 if (GET_CODE (parmreg) == CONCAT)
5099 /* Scan backwards for the set of the real and
5100 imaginary parts. */
5101 for (sinsn = linsn; sinsn != 0;
5102 sinsn = prev_nonnote_insn (sinsn))
5104 set = single_set (sinsn);
5105 if (set != 0
5106 && SET_DEST (set) == regno_reg_rtx [regnoi])
5107 REG_NOTES (sinsn)
5108 = gen_rtx_EXPR_LIST (REG_EQUIV,
5109 parm_reg_stack_loc[regnoi],
5110 REG_NOTES (sinsn));
5111 else if (set != 0
5112 && SET_DEST (set) == regno_reg_rtx [regnor])
5113 REG_NOTES (sinsn)
5114 = gen_rtx_EXPR_LIST (REG_EQUIV,
5115 parm_reg_stack_loc[regnor],
5116 REG_NOTES (sinsn));
5118 else if ((set = single_set (linsn)) != 0
5119 && SET_DEST (set) == parmreg)
5120 REG_NOTES (linsn)
5121 = gen_rtx_EXPR_LIST (REG_EQUIV,
5122 stack_parm, REG_NOTES (linsn));
5125 /* For pointer data type, suggest pointer register. */
5126 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5127 mark_reg_pointer (parmreg,
5128 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5130 /* If something wants our address, try to use ADDRESSOF. */
5131 if (TREE_ADDRESSABLE (parm))
5133 /* If we end up putting something into the stack,
5134 fixup_var_refs_insns will need to make a pass over
5135 all the instructions. It looks through the pending
5136 sequences -- but it can't see the ones in the
5137 CONVERSION_INSNS, if they're not on the sequence
5138 stack. So, we go back to that sequence, just so that
5139 the fixups will happen. */
5140 push_to_sequence (conversion_insns);
5141 put_var_into_stack (parm, /*rescan=*/true);
5142 conversion_insns = get_insns ();
5143 end_sequence ();
5146 else
5148 /* Value must be stored in the stack slot STACK_PARM
5149 during function execution. */
5151 if (promoted_mode != nominal_mode)
5153 /* Conversion is required. */
5154 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5156 emit_move_insn (tempreg, validize_mem (entry_parm));
5158 push_to_sequence (conversion_insns);
5159 entry_parm = convert_to_mode (nominal_mode, tempreg,
5160 TREE_UNSIGNED (TREE_TYPE (parm)));
5161 if (stack_parm)
5162 /* ??? This may need a big-endian conversion on sparc64. */
5163 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5165 conversion_insns = get_insns ();
5166 did_conversion = 1;
5167 end_sequence ();
5170 if (entry_parm != stack_parm)
5172 if (stack_parm == 0)
5174 stack_parm
5175 = assign_stack_local (GET_MODE (entry_parm),
5176 GET_MODE_SIZE (GET_MODE (entry_parm)),
5178 set_mem_attributes (stack_parm, parm, 1);
5181 if (promoted_mode != nominal_mode)
5183 push_to_sequence (conversion_insns);
5184 emit_move_insn (validize_mem (stack_parm),
5185 validize_mem (entry_parm));
5186 conversion_insns = get_insns ();
5187 end_sequence ();
5189 else
5190 emit_move_insn (validize_mem (stack_parm),
5191 validize_mem (entry_parm));
5194 SET_DECL_RTL (parm, stack_parm);
5198 if (SPLIT_COMPLEX_ARGS)
5200 parm = orig_fnargs;
5202 for (; parm; parm = TREE_CHAIN (parm))
5204 tree type = TREE_TYPE (parm);
5206 if (TREE_CODE (type) == COMPLEX_TYPE)
5208 SET_DECL_RTL (parm,
5209 gen_rtx_CONCAT (DECL_MODE (parm),
5210 DECL_RTL (fnargs),
5211 DECL_RTL (TREE_CHAIN (fnargs))));
5212 DECL_INCOMING_RTL (parm)
5213 = gen_rtx_CONCAT (DECL_MODE (parm),
5214 DECL_INCOMING_RTL (fnargs),
5215 DECL_INCOMING_RTL (TREE_CHAIN (fnargs)));
5216 fnargs = TREE_CHAIN (fnargs);
5218 else
5220 SET_DECL_RTL (parm, DECL_RTL (fnargs));
5221 DECL_INCOMING_RTL (parm) = DECL_INCOMING_RTL (fnargs);
5223 fnargs = TREE_CHAIN (fnargs);
5227 /* Output all parameter conversion instructions (possibly including calls)
5228 now that all parameters have been copied out of hard registers. */
5229 emit_insn (conversion_insns);
5231 /* If we are receiving a struct value address as the first argument, set up
5232 the RTL for the function result. As this might require code to convert
5233 the transmitted address to Pmode, we do this here to ensure that possible
5234 preliminary conversions of the address have been emitted already. */
5235 if (function_result_decl)
5237 tree result = DECL_RESULT (fndecl);
5238 rtx addr = DECL_RTL (function_result_decl);
5239 rtx x;
5241 #ifdef POINTERS_EXTEND_UNSIGNED
5242 if (GET_MODE (addr) != Pmode)
5243 addr = convert_memory_address (Pmode, addr);
5244 #endif
5246 x = gen_rtx_MEM (DECL_MODE (result), addr);
5247 set_mem_attributes (x, result, 1);
5248 SET_DECL_RTL (result, x);
5251 last_parm_insn = get_last_insn ();
5253 current_function_args_size = stack_args_size.constant;
5255 /* Adjust function incoming argument size for alignment and
5256 minimum length. */
5258 #ifdef REG_PARM_STACK_SPACE
5259 #ifndef MAYBE_REG_PARM_STACK_SPACE
5260 current_function_args_size = MAX (current_function_args_size,
5261 REG_PARM_STACK_SPACE (fndecl));
5262 #endif
5263 #endif
5265 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5267 current_function_args_size
5268 = ((current_function_args_size + STACK_BYTES - 1)
5269 / STACK_BYTES) * STACK_BYTES;
5271 #ifdef ARGS_GROW_DOWNWARD
5272 current_function_arg_offset_rtx
5273 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5274 : expand_expr (size_diffop (stack_args_size.var,
5275 size_int (-stack_args_size.constant)),
5276 NULL_RTX, VOIDmode, 0));
5277 #else
5278 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5279 #endif
5281 /* See how many bytes, if any, of its args a function should try to pop
5282 on return. */
5284 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5285 current_function_args_size);
5287 /* For stdarg.h function, save info about
5288 regs and stack space used by the named args. */
5290 current_function_args_info = args_so_far;
5292 /* Set the rtx used for the function return value. Put this in its
5293 own variable so any optimizers that need this information don't have
5294 to include tree.h. Do this here so it gets done when an inlined
5295 function gets output. */
5297 current_function_return_rtx
5298 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5299 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5301 /* If scalar return value was computed in a pseudo-reg, or was a named
5302 return value that got dumped to the stack, copy that to the hard
5303 return register. */
5304 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5306 tree decl_result = DECL_RESULT (fndecl);
5307 rtx decl_rtl = DECL_RTL (decl_result);
5309 if (REG_P (decl_rtl)
5310 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5311 : DECL_REGISTER (decl_result))
5313 rtx real_decl_rtl;
5315 #ifdef FUNCTION_OUTGOING_VALUE
5316 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5317 fndecl);
5318 #else
5319 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5320 fndecl);
5321 #endif
5322 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5323 /* The delay slot scheduler assumes that current_function_return_rtx
5324 holds the hard register containing the return value, not a
5325 temporary pseudo. */
5326 current_function_return_rtx = real_decl_rtl;
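/* Added commentary for split_complex_args below: each PARM_DECL of
   COMPLEX_TYPE in ARGS is rewritten in place to its component type and
   a second PARM_DECL for the other half is chained right after it, so
   a `__complex__ double' parm is handled as two `double' parms.  The
   SPLIT_COMPLEX_ARGS code above recombines the two halves into a
   CONCAT when setting the original parm's DECL_RTL.  */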
5331 static tree
5332 split_complex_args (tree args)
5334 tree p;
5336 args = copy_list (args);
5338 for (p = args; p; p = TREE_CHAIN (p))
5340 tree complex_type = TREE_TYPE (p);
5342 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
5344 tree decl;
5345 tree subtype = TREE_TYPE (complex_type);
5347 /* Rewrite the PARM_DECL's type with its component. */
5348 TREE_TYPE (p) = subtype;
5349 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
5351 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
5352 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
5353 TREE_CHAIN (decl) = TREE_CHAIN (p);
5354 TREE_CHAIN (p) = decl;
5358 return args;
5361 /* Indicate whether REGNO is an incoming argument to the current function
5362 that was promoted to a wider mode. If so, return the RTX for the
5363 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5364 that REGNO is promoted from and whether the promotion was signed or
5365 unsigned. */
5367 #ifdef PROMOTE_FUNCTION_ARGS
5369 rtx
5370 promoted_input_arg (regno, pmode, punsignedp)
5371 unsigned int regno;
5372 enum machine_mode *pmode;
5373 int *punsignedp;
5375 tree arg;
5377 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5378 arg = TREE_CHAIN (arg))
5379 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5380 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5381 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5383 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5384 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5386 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5387 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5388 && mode != DECL_MODE (arg))
5390 *pmode = DECL_MODE (arg);
5391 *punsignedp = unsignedp;
5392 return DECL_INCOMING_RTL (arg);
5396 return 0;
5399 #endif
5401 /* Compute the size and offset from the start of the stacked arguments for a
5402 parm passed in mode PASSED_MODE and with type TYPE.
5404 INITIAL_OFFSET_PTR points to the current offset into the stacked
5405 arguments.
5407 The starting offset and size for this parm are returned in
5408 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
5409 nonzero, the offset is that of stack slot, which is returned in
5410 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
5411 padding required from the initial offset ptr to the stack slot.
5413 IN_REGS is nonzero if the argument will be passed in registers. It will
5414 never be set if REG_PARM_STACK_SPACE is not defined.
5416 FNDECL is the function in which the argument was defined.
5418 There are two types of rounding that are done. The first, controlled by
5419 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5420 list to be aligned to the specific boundary (in bits). This rounding
5421 affects the initial and starting offsets, but not the argument size.
5423 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5424 optionally rounds the size of the parm to PARM_BOUNDARY. The
5425 initial offset is not affected by this rounding, while the size always
5426 is and the starting offset may be. */
5428 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
5429 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
5430 callers pass in the total size of args so far as
5431 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
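/* Worked example (added commentary, args growing upward): assuming
   FUNCTION_ARG_BOUNDARY returns 32 bits, a running offset of 6 bytes
   and a 4-byte int parm, pad_to_arg_alignment first rounds the offset
   up to 8, giving LOCATE->SLOT_OFFSET == LOCATE->OFFSET == 8 and
   LOCATE->SIZE == 4; the caller then advances the running offset
   to 12.  */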
5433 void
5434 locate_and_pad_parm (passed_mode, type, in_regs, partial, fndecl,
5435 initial_offset_ptr, locate)
5436 enum machine_mode passed_mode;
5437 tree type;
5438 int in_regs;
5439 int partial;
5440 tree fndecl ATTRIBUTE_UNUSED;
5441 struct args_size *initial_offset_ptr;
5442 struct locate_and_pad_arg_data *locate;
5444 tree sizetree;
5445 enum direction where_pad;
5446 int boundary;
5447 int reg_parm_stack_space = 0;
5448 int part_size_in_regs;
5450 #ifdef REG_PARM_STACK_SPACE
5451 #ifdef MAYBE_REG_PARM_STACK_SPACE
5452 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5453 #else
5454 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5455 #endif
5457 /* If we have found a stack parm before we reach the end of the
5458 area reserved for registers, skip that area. */
5459 if (! in_regs)
5461 if (reg_parm_stack_space > 0)
5463 if (initial_offset_ptr->var)
5465 initial_offset_ptr->var
5466 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5467 ssize_int (reg_parm_stack_space));
5468 initial_offset_ptr->constant = 0;
5470 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5471 initial_offset_ptr->constant = reg_parm_stack_space;
5474 #endif /* REG_PARM_STACK_SPACE */
5476 part_size_in_regs = 0;
5477 if (reg_parm_stack_space == 0)
5478 part_size_in_regs = ((partial * UNITS_PER_WORD)
5479 / (PARM_BOUNDARY / BITS_PER_UNIT)
5480 * (PARM_BOUNDARY / BITS_PER_UNIT));
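/* I.e. the bytes passed in registers, rounded down to a multiple of
   the parm boundary: e.g. with 4-byte words and a 64-bit PARM_BOUNDARY,
   PARTIAL == 3 gives 12 / 8 * 8 == 8 bytes. */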
5482 sizetree
5483 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5484 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5485 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5487 #ifdef ARGS_GROW_DOWNWARD
5488 locate->slot_offset.constant = -initial_offset_ptr->constant;
5489 if (initial_offset_ptr->var)
5490 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
5491 initial_offset_ptr->var);
5494 tree s2 = sizetree;
5495 if (where_pad != none
5496 && (!host_integerp (sizetree, 1)
5497 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5498 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
5499 SUB_PARM_SIZE (locate->slot_offset, s2);
5502 locate->slot_offset.constant += part_size_in_regs;
5504 if (!in_regs
5505 #ifdef REG_PARM_STACK_SPACE
5506 || REG_PARM_STACK_SPACE (fndecl) > 0
5507 #endif
5508 )
5509 pad_to_arg_alignment (&locate->slot_offset, boundary,
5510 &locate->alignment_pad);
5512 locate->size.constant = (-initial_offset_ptr->constant
5513 - locate->slot_offset.constant);
5514 if (initial_offset_ptr->var)
5515 locate->size.var = size_binop (MINUS_EXPR,
5516 size_binop (MINUS_EXPR,
5517 ssize_int (0),
5518 initial_offset_ptr->var),
5519 locate->slot_offset.var);
5521 /* Pad_below needs the pre-rounded size to know how much to pad
5522 below. */
5523 locate->offset = locate->slot_offset;
5524 if (where_pad == downward)
5525 pad_below (&locate->offset, passed_mode, sizetree);
5527 #else /* !ARGS_GROW_DOWNWARD */
5528 if (!in_regs
5529 #ifdef REG_PARM_STACK_SPACE
5530 || REG_PARM_STACK_SPACE (fndecl) > 0
5531 #endif
5532 )
5533 pad_to_arg_alignment (initial_offset_ptr, boundary,
5534 &locate->alignment_pad);
5535 locate->slot_offset = *initial_offset_ptr;
5537 #ifdef PUSH_ROUNDING
5538 if (passed_mode != BLKmode)
5539 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5540 #endif
5542 /* Pad_below needs the pre-rounded size to know how much to pad below
5543 so this must be done before rounding up. */
5544 locate->offset = locate->slot_offset;
5545 if (where_pad == downward)
5546 pad_below (&locate->offset, passed_mode, sizetree);
5548 if (where_pad != none
5549 && (!host_integerp (sizetree, 1)
5550 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5551 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5553 ADD_PARM_SIZE (locate->size, sizetree);
5555 locate->size.constant -= part_size_in_regs;
5556 #endif /* ARGS_GROW_DOWNWARD */
5559 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5560 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
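/* E.g. with BOUNDARY == 64 (8 bytes), a constant offset of 13 rounds
   up to 16; when ARGS_GROW_DOWNWARD, an offset of -13 instead rounds
   down to -16. ALIGNMENT_PAD records the difference, but only when
   BOUNDARY exceeds both PARM_BOUNDARY and STACK_BOUNDARY. */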
5562 static void
5563 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5564 struct args_size *offset_ptr;
5565 int boundary;
5566 struct args_size *alignment_pad;
5568 tree save_var = NULL_TREE;
5569 HOST_WIDE_INT save_constant = 0;
5571 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5573 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5575 save_var = offset_ptr->var;
5576 save_constant = offset_ptr->constant;
5579 alignment_pad->var = NULL_TREE;
5580 alignment_pad->constant = 0;
5582 if (boundary > BITS_PER_UNIT)
5584 if (offset_ptr->var)
5586 offset_ptr->var =
5587 #ifdef ARGS_GROW_DOWNWARD
5588 round_down
5589 #else
5590 round_up
5591 #endif
5592 (ARGS_SIZE_TREE (*offset_ptr),
5593 boundary / BITS_PER_UNIT);
5594 /* ARGS_SIZE_TREE includes constant term. */
5595 offset_ptr->constant = 0;
5596 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5597 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5598 save_var);
5600 else
5602 offset_ptr->constant =
5603 #ifdef ARGS_GROW_DOWNWARD
5604 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5605 #else
5606 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5607 #endif
5608 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5609 alignment_pad->constant = offset_ptr->constant - save_constant;
5614 static void
5615 pad_below (offset_ptr, passed_mode, sizetree)
5616 struct args_size *offset_ptr;
5617 enum machine_mode passed_mode;
5618 tree sizetree;
5620 if (passed_mode != BLKmode)
5622 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5623 offset_ptr->constant
5624 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5625 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5626 - GET_MODE_SIZE (passed_mode));
5628 else
5630 if (TREE_CODE (sizetree) != INTEGER_CST
5631 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5633 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5634 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5635 /* Add it in. */
5636 ADD_PARM_SIZE (*offset_ptr, s2);
5637 SUB_PARM_SIZE (*offset_ptr, sizetree);
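/* Either way the net adjustment is round_up (size) - size, i.e. just
   the padding: e.g. with PARM_BOUNDARY == 32, a 2-byte HImode value
   adds 2 bytes of padding below, and a 3-byte BLKmode value adds 1. */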
5642 /* Walk the tree of blocks describing the binding levels within a function
5643 and warn about uninitialized variables.
5644 This is done after calling flow_analysis and before global_alloc
5645 clobbers the pseudo-regs to hard regs. */
5647 void
5648 uninitialized_vars_warning (block)
5649 tree block;
5651 tree decl, sub;
5652 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5654 if (warn_uninitialized
5655 && TREE_CODE (decl) == VAR_DECL
5656 /* These warnings are unreliable for aggregates
5657 because assigning the fields one by one can fail to convince
5658 flow.c that the entire aggregate was initialized.
5659 Unions are troublesome because members may be shorter. */
5660 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5661 && DECL_RTL (decl) != 0
5662 && GET_CODE (DECL_RTL (decl)) == REG
5663 /* Global optimizations can make it difficult to determine if a
5664 particular variable has been initialized. However, a VAR_DECL
5665 with a nonzero DECL_INITIAL had an initializer, so do not
5666 claim it is potentially uninitialized.
5668 We do not care about the actual value in DECL_INITIAL, so we do
5669 not worry that it may be a dangling pointer. */
5670 && DECL_INITIAL (decl) == NULL_TREE
5671 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5672 warning_with_decl (decl,
5673 "`%s' might be used uninitialized in this function");
5674 if (extra_warnings
5675 && TREE_CODE (decl) == VAR_DECL
5676 && DECL_RTL (decl) != 0
5677 && GET_CODE (DECL_RTL (decl)) == REG
5678 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5679 warning_with_decl (decl,
5680 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5682 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5683 uninitialized_vars_warning (sub);
5686 /* Do the appropriate part of uninitialized_vars_warning
5687 but for arguments instead of local variables. */
5689 void
5690 setjmp_args_warning ()
5692 tree decl;
5693 for (decl = DECL_ARGUMENTS (current_function_decl);
5694 decl; decl = TREE_CHAIN (decl))
5695 if (DECL_RTL (decl) != 0
5696 && GET_CODE (DECL_RTL (decl)) == REG
5697 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5698 warning_with_decl (decl,
5699 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5702 /* If this function calls setjmp, put all vars into the stack
5703 unless they were declared `register'. */
5705 void
5706 setjmp_protect (block)
5707 tree block;
5709 tree decl, sub;
5710 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5711 if ((TREE_CODE (decl) == VAR_DECL
5712 || TREE_CODE (decl) == PARM_DECL)
5713 && DECL_RTL (decl) != 0
5714 && (GET_CODE (DECL_RTL (decl)) == REG
5715 || (GET_CODE (DECL_RTL (decl)) == MEM
5716 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5717 /* If this variable came from an inline function, it must be
5718 that its life doesn't overlap the setjmp. If there was a
5719 setjmp in the function, it would already be in memory. We
5720 must exclude such variables because their DECL_RTL might be
5721 set to strange things such as virtual_stack_vars_rtx. */
5722 && ! DECL_FROM_INLINE (decl)
5723 && (
5724 #ifdef NON_SAVING_SETJMP
5725 /* If longjmp doesn't restore the registers,
5726 don't put anything in them. */
5727 NON_SAVING_SETJMP
5728 ||
5729 #endif
5730 ! DECL_REGISTER (decl)))
5731 put_var_into_stack (decl, /*rescan=*/true);
5732 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5733 setjmp_protect (sub);
5736 /* Like the previous function, but for args instead of local variables. */
5738 void
5739 setjmp_protect_args ()
5741 tree decl;
5742 for (decl = DECL_ARGUMENTS (current_function_decl);
5743 decl; decl = TREE_CHAIN (decl))
5744 if ((TREE_CODE (decl) == VAR_DECL
5745 || TREE_CODE (decl) == PARM_DECL)
5746 && DECL_RTL (decl) != 0
5747 && (GET_CODE (DECL_RTL (decl)) == REG
5748 || (GET_CODE (DECL_RTL (decl)) == MEM
5749 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5750 && (
5751 /* If longjmp doesn't restore the registers,
5752 don't put anything in them. */
5753 #ifdef NON_SAVING_SETJMP
5754 NON_SAVING_SETJMP
5755 ||
5756 #endif
5757 ! DECL_REGISTER (decl)))
5758 put_var_into_stack (decl, /*rescan=*/true);
5761 /* Return the context-pointer register corresponding to DECL,
5762 or 0 if it does not need one. */
5764 rtx
5765 lookup_static_chain (decl)
5766 tree decl;
5768 tree context = decl_function_context (decl);
5769 tree link;
5771 if (context == 0
5772 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5773 return 0;
5775 /* We treat inline_function_decl as an alias for the current function
5776 because that is the inline function whose vars, types, etc.
5777 are being merged into the current function.
5778 See expand_inline_function. */
5779 if (context == current_function_decl || context == inline_function_decl)
5780 return virtual_stack_vars_rtx;
5782 for (link = context_display; link; link = TREE_CHAIN (link))
5783 if (TREE_PURPOSE (link) == context)
5784 return RTL_EXPR_RTL (TREE_VALUE (link));
5786 abort ();
5789 /* Convert a stack slot address ADDR for variable VAR
5790 (from a containing function)
5791 into an address valid in this function (using a static chain). */
5793 rtx
5794 fix_lexical_addr (addr, var)
5795 rtx addr;
5796 tree var;
5798 rtx basereg;
5799 HOST_WIDE_INT displacement;
5800 tree context = decl_function_context (var);
5801 struct function *fp;
5802 rtx base = 0;
5804 /* If this is the present function, we need not do anything. */
5805 if (context == current_function_decl || context == inline_function_decl)
5806 return addr;
5808 fp = find_function_data (context);
5810 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5811 addr = XEXP (XEXP (addr, 0), 0);
5813 /* Decode given address as base reg plus displacement. */
5814 if (GET_CODE (addr) == REG)
5815 basereg = addr, displacement = 0;
5816 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5817 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5818 else
5819 abort ();
5821 /* We accept vars reached via the containing function's
5822 incoming arg pointer and via its stack variables pointer. */
5823 if (basereg == fp->internal_arg_pointer)
5825 /* If reached via arg pointer, get the arg pointer value
5826 out of that function's stack frame.
5828 There are two cases: If a separate ap is needed, allocate a
5829 slot in the outer function for it and dereference it that way.
5830 This is correct even if the real ap is actually a pseudo.
5831 Otherwise, just adjust the offset from the frame pointer to
5832 compensate. */
5834 #ifdef NEED_SEPARATE_AP
5835 rtx addr;
5837 addr = get_arg_pointer_save_area (fp);
5838 addr = fix_lexical_addr (XEXP (addr, 0), var);
5839 addr = memory_address (Pmode, addr);
5841 base = gen_rtx_MEM (Pmode, addr);
5842 set_mem_alias_set (base, get_frame_alias_set ());
5843 base = copy_to_reg (base);
5844 #else
5845 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5846 base = lookup_static_chain (var);
5847 #endif
5850 else if (basereg == virtual_stack_vars_rtx)
5852 /* This is the same code as lookup_static_chain, duplicated here to
5853 avoid an extra call to decl_function_context. */
5854 tree link;
5856 for (link = context_display; link; link = TREE_CHAIN (link))
5857 if (TREE_PURPOSE (link) == context)
5859 base = RTL_EXPR_RTL (TREE_VALUE (link));
5860 break;
5864 if (base == 0)
5865 abort ();
5867 /* Use same offset, relative to appropriate static chain or argument
5868 pointer. */
5869 return plus_constant (base, displacement);
5872 /* Return the address of the trampoline for entering nested fn FUNCTION.
5873 If necessary, allocate a trampoline (in the stack frame)
5874 and emit rtl to initialize its contents (at entry to this function). */
5876 rtx
5877 trampoline_address (function)
5878 tree function;
5880 tree link;
5881 tree rtlexp;
5882 rtx tramp;
5883 struct function *fp;
5884 tree fn_context;
5886 /* Find an existing trampoline and return it. */
5887 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5888 if (TREE_PURPOSE (link) == function)
5889 return
5890 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5892 for (fp = outer_function_chain; fp; fp = fp->outer)
5893 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5894 if (TREE_PURPOSE (link) == function)
5896 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5897 function);
5898 return adjust_trampoline_addr (tramp);
5901 /* None exists; we must make one. */
5903 /* Find the `struct function' for the function containing FUNCTION. */
5904 fp = 0;
5905 fn_context = decl_function_context (function);
5906 if (fn_context != current_function_decl
5907 && fn_context != inline_function_decl)
5908 fp = find_function_data (fn_context);
5910 /* Allocate run-time space for this trampoline. */
5911 /* If rounding needed, allocate extra space
5912 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5913 #define TRAMPOLINE_REAL_SIZE \
5914 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5915 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5916 fp ? fp : cfun);
5917 /* Record the trampoline for reuse and note it for later initialization
5918 by expand_function_end. */
5919 if (fp != 0)
5921 rtlexp = make_node (RTL_EXPR);
5922 RTL_EXPR_RTL (rtlexp) = tramp;
5923 fp->x_trampoline_list = tree_cons (function, rtlexp,
5924 fp->x_trampoline_list);
5926 else
5928 /* Make the RTL_EXPR node temporary, not momentary, so that the
5929 trampoline_list doesn't become garbage. */
5930 rtlexp = make_node (RTL_EXPR);
5932 RTL_EXPR_RTL (rtlexp) = tramp;
5933 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5936 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5937 return adjust_trampoline_addr (tramp);
5940 /* Given a trampoline address,
5941 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
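/* This is the usual add-and-mask idiom, (TRAMP + ALIGN - 1) & -ALIGN,
   with ALIGN being TRAMPOLINE_ALIGNMENT in bytes: e.g. with ALIGN == 16,
   an address of 0x1003 becomes 0x1010. */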
5943 static rtx
5944 round_trampoline_addr (tramp)
5945 rtx tramp;
5947 /* Round address up to desired boundary. */
5948 rtx temp = gen_reg_rtx (Pmode);
5949 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5950 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5952 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5953 temp, 0, OPTAB_LIB_WIDEN);
5954 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5955 temp, 0, OPTAB_LIB_WIDEN);
5957 return tramp;
5960 /* Given a trampoline address, round it then apply any
5961 platform-specific adjustments so that the result can be used for a
5962 function call. */
5964 static rtx
5965 adjust_trampoline_addr (tramp)
5966 rtx tramp;
5968 tramp = round_trampoline_addr (tramp);
5969 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5970 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5971 #endif
5972 return tramp;
5975 /* Put all this function's BLOCK nodes, including those that are chained
5976 onto the first block, into a vector, in depth-first order.
5977 Also store in each NOTE for the beginning or end of a block
5978 the corresponding BLOCK node (in NOTE_BLOCK).
5979 This operates on the chain of top-level blocks of the current function
5980 and on the function's insn chain. */
5982 void
5983 identify_blocks ()
5985 int n_blocks;
5986 tree *block_vector, *last_block_vector;
5987 tree *block_stack;
5988 tree block = DECL_INITIAL (current_function_decl);
5990 if (block == 0)
5991 return;
5993 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5994 depth-first order. */
5995 block_vector = get_block_vector (block, &n_blocks);
5996 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5998 last_block_vector = identify_blocks_1 (get_insns (),
5999 block_vector + 1,
6000 block_vector + n_blocks,
6001 block_stack);
6003 /* If we didn't use all of the subblocks, we've misplaced block notes. */
6004 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
6005 if (0 && last_block_vector != block_vector + n_blocks)
6006 abort ();
6008 free (block_vector);
6009 free (block_stack);
6012 /* Subroutine of identify_blocks. Do the block substitution on the
6013 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
6015 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
6016 BLOCK_VECTOR is incremented for each block seen. */
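/* In effect this is a matched-parenthesis walk: each NOTE_INSN_BLOCK_BEG
   consumes the next BLOCK from BLOCK_VECTOR and pushes it, and each
   NOTE_INSN_BLOCK_END pops that BLOCK back off into the matching note's
   NOTE_BLOCK. */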
6018 static tree *
6019 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
6020 rtx insns;
6021 tree *block_vector;
6022 tree *end_block_vector;
6023 tree *orig_block_stack;
6025 rtx insn;
6026 tree *block_stack = orig_block_stack;
6028 for (insn = insns; insn; insn = NEXT_INSN (insn))
6030 if (GET_CODE (insn) == NOTE)
6032 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
6034 tree b;
6036 /* If there are more block notes than BLOCKs, something
6037 is badly wrong. */
6038 if (block_vector == end_block_vector)
6039 abort ();
6041 b = *block_vector++;
6042 NOTE_BLOCK (insn) = b;
6043 *block_stack++ = b;
6045 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6047 /* If there are more NOTE_INSN_BLOCK_ENDs than
6048 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
6049 if (block_stack == orig_block_stack)
6050 abort ();
6052 NOTE_BLOCK (insn) = *--block_stack;
6055 else if (GET_CODE (insn) == CALL_INSN
6056 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6058 rtx cp = PATTERN (insn);
6060 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
6061 end_block_vector, block_stack);
6062 if (XEXP (cp, 1))
6063 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
6064 end_block_vector, block_stack);
6065 if (XEXP (cp, 2))
6066 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
6067 end_block_vector, block_stack);
6071 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
6072 something is badly wrong. */
6073 if (block_stack != orig_block_stack)
6074 abort ();
6076 return block_vector;
6079 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
6080 and create duplicate blocks. */
6081 /* ??? Need an option to either create block fragments or to create
6082 abstract origin duplicates of a source block. It really depends
6083 on what optimization has been performed. */
6085 void
6086 reorder_blocks ()
6088 tree block = DECL_INITIAL (current_function_decl);
6089 varray_type block_stack;
6091 if (block == NULL_TREE)
6092 return;
6094 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
6096 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6097 reorder_blocks_0 (block);
6099 /* Prune the old trees away, so that they don't get in the way. */
6100 BLOCK_SUBBLOCKS (block) = NULL_TREE;
6101 BLOCK_CHAIN (block) = NULL_TREE;
6103 /* Recreate the block tree from the note nesting. */
6104 reorder_blocks_1 (get_insns (), block, &block_stack);
6105 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
6107 /* Remove deleted blocks from the block fragment chains. */
6108 reorder_fix_fragments (block);
6111 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
6113 static void
6114 reorder_blocks_0 (block)
6115 tree block;
6117 while (block)
6119 TREE_ASM_WRITTEN (block) = 0;
6120 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
6121 block = BLOCK_CHAIN (block);
6125 static void
6126 reorder_blocks_1 (insns, current_block, p_block_stack)
6127 rtx insns;
6128 tree current_block;
6129 varray_type *p_block_stack;
6131 rtx insn;
6133 for (insn = insns; insn; insn = NEXT_INSN (insn))
6135 if (GET_CODE (insn) == NOTE)
6137 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
6139 tree block = NOTE_BLOCK (insn);
6141 /* If we have seen this block before, that means it now
6142 spans multiple address regions. Create a new fragment. */
6143 if (TREE_ASM_WRITTEN (block))
6145 tree new_block = copy_node (block);
6146 tree origin;
6148 origin = (BLOCK_FRAGMENT_ORIGIN (block)
6149 ? BLOCK_FRAGMENT_ORIGIN (block)
6150 : block);
6151 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
6152 BLOCK_FRAGMENT_CHAIN (new_block)
6153 = BLOCK_FRAGMENT_CHAIN (origin);
6154 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
6156 NOTE_BLOCK (insn) = new_block;
6157 block = new_block;
6160 BLOCK_SUBBLOCKS (block) = 0;
6161 TREE_ASM_WRITTEN (block) = 1;
6162 /* When there's only one block for the entire function,
6163 current_block == block, and we mustn't do this;
6164 it would cause infinite recursion. */
6165 if (block != current_block)
6167 BLOCK_SUPERCONTEXT (block) = current_block;
6168 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
6169 BLOCK_SUBBLOCKS (current_block) = block;
6170 current_block = block;
6172 VARRAY_PUSH_TREE (*p_block_stack, block);
6174 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6176 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
6177 VARRAY_POP (*p_block_stack);
6178 BLOCK_SUBBLOCKS (current_block)
6179 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
6180 current_block = BLOCK_SUPERCONTEXT (current_block);
6183 else if (GET_CODE (insn) == CALL_INSN
6184 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6186 rtx cp = PATTERN (insn);
6187 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
6188 if (XEXP (cp, 1))
6189 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
6190 if (XEXP (cp, 2))
6191 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
6196 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
6197 appears in the block tree, select one of the fragments to become
6198 the new origin block. */
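/* I.e. when the old origin did not survive, the first fragment still
   marked TREE_ASM_WRITTEN is promoted to be the origin; either way the
   surviving fragments are re-rooted onto NEW_ORIGIN and fragments that
   never made it to the output are spliced out of the chain. */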
6200 static void
6201 reorder_fix_fragments (block)
6202 tree block;
6204 while (block)
6206 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6207 tree new_origin = NULL_TREE;
6209 if (dup_origin)
6211 if (! TREE_ASM_WRITTEN (dup_origin))
6213 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6215 /* Find the first of the remaining fragments. There must
6216 be at least one -- the current block. */
6217 while (! TREE_ASM_WRITTEN (new_origin))
6218 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6219 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6222 else if (! dup_origin)
6223 new_origin = block;
6225 /* Re-root the rest of the fragments to the new origin. In the
6226 case that DUP_ORIGIN was null, that means BLOCK was the origin
6227 of a chain of fragments and we want to remove those fragments
6228 that didn't make it to the output. */
6229 if (new_origin)
6231 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6232 tree chain = *pp;
6234 while (chain)
6236 if (TREE_ASM_WRITTEN (chain))
6238 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6239 *pp = chain;
6240 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6242 chain = BLOCK_FRAGMENT_CHAIN (chain);
6244 *pp = NULL_TREE;
6247 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6248 block = BLOCK_CHAIN (block);
6252 /* Reverse the order of elements in the chain T of blocks,
6253 and return the new head of the chain (old last element). */
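/* The usual three-pointer in-place reversal: a chain A -> B -> C comes
   back as C -> B -> A. */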
6255 static tree
6256 blocks_nreverse (t)
6257 tree t;
6259 tree prev = 0, decl, next;
6260 for (decl = t; decl; decl = next)
6262 next = BLOCK_CHAIN (decl);
6263 BLOCK_CHAIN (decl) = prev;
6264 prev = decl;
6266 return prev;
6269 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6270 non-NULL, list them all into VECTOR, in a depth-first preorder
6271 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6272 blocks. */
6274 static int
6275 all_blocks (block, vector)
6276 tree block;
6277 tree *vector;
6279 int n_blocks = 0;
6281 while (block)
6283 TREE_ASM_WRITTEN (block) = 0;
6285 /* Record this block. */
6286 if (vector)
6287 vector[n_blocks] = block;
6289 ++n_blocks;
6291 /* Record the subblocks, and their subblocks... */
6292 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6293 vector ? vector + n_blocks : 0);
6294 block = BLOCK_CHAIN (block);
6297 return n_blocks;
6300 /* Return a vector containing all the blocks rooted at BLOCK. The
6301 number of elements in the vector is stored in N_BLOCKS_P. The
6302 vector is dynamically allocated; it is the caller's responsibility
6303 to call `free' on the pointer returned. */
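/* This takes two passes: all_blocks is first called with a null vector
   merely to count the blocks, then called again to fill in the freshly
   allocated vector. */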
6305 static tree *
6306 get_block_vector (block, n_blocks_p)
6307 tree block;
6308 int *n_blocks_p;
6310 tree *block_vector;
6312 *n_blocks_p = all_blocks (block, NULL);
6313 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6314 all_blocks (block, block_vector);
6316 return block_vector;
6319 static GTY(()) int next_block_index = 2;
6321 /* Set BLOCK_NUMBER for all the blocks in FN. */
6323 void
6324 number_blocks (fn)
6325 tree fn;
6327 int i;
6328 int n_blocks;
6329 tree *block_vector;
6331 /* For SDB and XCOFF debugging output, we start numbering the blocks
6332 from 1 within each function, rather than keeping a running
6333 count. */
6334 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6335 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6336 next_block_index = 1;
6337 #endif
6339 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6341 /* The top-level BLOCK isn't numbered at all. */
6342 for (i = 1; i < n_blocks; ++i)
6343 /* We number the blocks from two. */
6344 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6346 free (block_vector);
6348 return;
6351 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6353 tree
6354 debug_find_var_in_block_tree (var, block)
6355 tree var;
6356 tree block;
6358 tree t;
6360 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6361 if (t == var)
6362 return block;
6364 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6366 tree ret = debug_find_var_in_block_tree (var, t);
6367 if (ret)
6368 return ret;
6371 return NULL_TREE;
6374 /* Allocate a function structure and reset its contents to the defaults. */
6376 static void
6377 prepare_function_start ()
6379 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6381 init_stmt_for_function ();
6382 init_eh_for_function ();
6384 cse_not_expected = ! optimize;
6386 /* Caller save not needed yet. */
6387 caller_save_needed = 0;
6389 /* No stack slots have been made yet. */
6390 stack_slot_list = 0;
6392 current_function_has_nonlocal_label = 0;
6393 current_function_has_nonlocal_goto = 0;
6395 /* There is no stack slot for handling nonlocal gotos. */
6396 nonlocal_goto_handler_slots = 0;
6397 nonlocal_goto_stack_level = 0;
6399 /* No labels have been declared for nonlocal use. */
6400 nonlocal_labels = 0;
6401 nonlocal_goto_handler_labels = 0;
6403 /* No function calls so far in this function. */
6404 function_call_count = 0;
6406 /* No parm regs have been allocated.
6407 (This is important for output_inline_function.) */
6408 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6410 /* Initialize the RTL mechanism. */
6411 init_emit ();
6413 /* Initialize the queue of pending postincrement and postdecrements,
6414 and some other info in expr.c. */
6415 init_expr ();
6417 /* We haven't done register allocation yet. */
6418 reg_renumber = 0;
6420 init_varasm_status (cfun);
6422 /* Clear out data used for inlining. */
6423 cfun->inlinable = 0;
6424 cfun->original_decl_initial = 0;
6425 cfun->original_arg_vector = 0;
6427 cfun->stack_alignment_needed = STACK_BOUNDARY;
6428 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6430 /* Set if a call to setjmp is seen. */
6431 current_function_calls_setjmp = 0;
6433 /* Set if a call to longjmp is seen. */
6434 current_function_calls_longjmp = 0;
6436 current_function_calls_alloca = 0;
6437 current_function_calls_eh_return = 0;
6438 current_function_calls_constant_p = 0;
6439 current_function_contains_functions = 0;
6440 current_function_is_leaf = 0;
6441 current_function_nothrow = 0;
6442 current_function_sp_is_unchanging = 0;
6443 current_function_uses_only_leaf_regs = 0;
6444 current_function_has_computed_jump = 0;
6445 current_function_is_thunk = 0;
6447 current_function_returns_pcc_struct = 0;
6448 current_function_returns_struct = 0;
6449 current_function_epilogue_delay_list = 0;
6450 current_function_uses_const_pool = 0;
6451 current_function_uses_pic_offset_table = 0;
6452 current_function_cannot_inline = 0;
6454 /* We have not yet needed to make a label to jump to for tail-recursion. */
6455 tail_recursion_label = 0;
6457 /* We haven't had a need to make a save area for ap yet. */
6458 arg_pointer_save_area = 0;
6460 /* No stack slots allocated yet. */
6461 frame_offset = 0;
6463 /* No SAVE_EXPRs in this function yet. */
6464 save_expr_regs = 0;
6466 /* No RTL_EXPRs in this function yet. */
6467 rtl_expr_chain = 0;
6469 /* Set up to allocate temporaries. */
6470 init_temp_slots ();
6472 /* Indicate that we need to distinguish between the return value of the
6473 present function and the return value of a function being called. */
6474 rtx_equal_function_value_matters = 1;
6476 /* Indicate that we have not instantiated virtual registers yet. */
6477 virtuals_instantiated = 0;
6479 /* Indicate that we want CONCATs now. */
6480 generating_concat_p = 1;
6482 /* Indicate we have no need of a frame pointer yet. */
6483 frame_pointer_needed = 0;
6485 /* By default assume not stdarg. */
6486 current_function_stdarg = 0;
6488 /* We haven't made any trampolines for this function yet. */
6489 trampoline_list = 0;
6491 init_pending_stack_adjust ();
6492 inhibit_defer_pop = 0;
6494 current_function_outgoing_args_size = 0;
6496 current_function_funcdef_no = funcdef_no++;
6498 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6500 cfun->max_jumptable_ents = 0;
6502 (*lang_hooks.function.init) (cfun);
6503 if (init_machine_status)
6504 cfun->machine = (*init_machine_status) ();
6507 /* Initialize the rtl expansion mechanism so that we can do simple things
6508 like generate sequences. This is used to provide a context during global
6509 initialization of some passes. */
6510 void
6511 init_dummy_function_start ()
6513 prepare_function_start ();
6516 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6517 and initialize static variables for generating RTL for the statements
6518 of the function. */
6520 void
6521 init_function_start (subr, filename, line)
6522 tree subr;
6523 const char *filename;
6524 int line;
6526 prepare_function_start ();
6528 current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
6529 cfun->decl = subr;
6531 /* Nonzero if this is a nested function that uses a static chain. */
6533 current_function_needs_context
6534 = (decl_function_context (current_function_decl) != 0
6535 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6537 /* Within function body, compute a type's size as soon as it is laid out. */
6538 immediate_size_expand++;
6540 /* Prevent ever trying to delete the first instruction of a function.
6541 Also tell final how to output a linenum before the function prologue.
6542 Note linenums could be missing, e.g. when compiling a Java .class file. */
6543 if (line > 0)
6544 emit_line_note (filename, line);
6546 /* Make sure first insn is a note even if we don't want linenums.
6547 This makes sure the first insn will never be deleted.
6548 Also, final expects a note to appear there. */
6549 emit_note (NULL, NOTE_INSN_DELETED);
6551 /* Set flags used by final.c. */
6552 if (aggregate_value_p (DECL_RESULT (subr)))
6554 #ifdef PCC_STATIC_STRUCT_RETURN
6555 current_function_returns_pcc_struct = 1;
6556 #endif
6557 current_function_returns_struct = 1;
6560 /* Warn if this value is an aggregate type,
6561 regardless of which calling convention we are using for it. */
6562 if (warn_aggregate_return
6563 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6564 warning ("function returns an aggregate");
6566 current_function_returns_pointer
6567 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6570 /* Make sure all values used by the optimization passes have sane
6571 defaults. */
6572 void
6573 init_function_for_compilation ()
6575 reg_renumber = 0;
6577 /* No prologue/epilogue insns yet. */
6578 VARRAY_GROW (prologue, 0);
6579 VARRAY_GROW (epilogue, 0);
6580 VARRAY_GROW (sibcall_epilogue, 0);
6583 /* Expand a call to __main at the beginning of a possible main function. */
6585 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6586 #undef HAS_INIT_SECTION
6587 #define HAS_INIT_SECTION
6588 #endif
6590 void
6591 expand_main_function ()
6593 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6594 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6596 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6597 rtx tmp, seq;
6599 start_sequence ();
6600 /* Forcibly align the stack. */
6601 #ifdef STACK_GROWS_DOWNWARD
6602 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
6603 stack_pointer_rtx, 1, OPTAB_WIDEN);
6604 #else
6605 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6606 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6607 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6608 stack_pointer_rtx, 1, OPTAB_WIDEN);
6609 #endif
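/* Either branch leaves the stack pointer at a multiple of ALIGN: e.g.
   with ALIGN == 16, an incoming value of 0x1009 is masked down to
   0x1000 when the stack grows downward, or bumped up to 0x1010
   otherwise. */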
6610 if (tmp != stack_pointer_rtx)
6611 emit_move_insn (stack_pointer_rtx, tmp);
6613 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6614 tmp = force_reg (Pmode, const0_rtx);
6615 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6616 seq = get_insns ();
6617 end_sequence ();
6619 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6620 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6621 break;
6622 if (tmp)
6623 emit_insn_before (seq, tmp);
6624 else
6625 emit_insn (seq);
6627 #endif
6629 #ifndef HAS_INIT_SECTION
6630 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
6631 #endif
6634 /* The PENDING_SIZES represent the sizes of variable-sized types.
6635 Create RTL for the various sizes now (using temporary variables),
6636 so that we can refer to the sizes from the RTL we are generating
6637 for the current function. The PENDING_SIZES are a TREE_LIST. The
6638 TREE_VALUE of each node is a SAVE_EXPR. */
6640 void
6641 expand_pending_sizes (pending_sizes)
6642 tree pending_sizes;
6644 tree tem;
6646 /* Evaluate now the sizes of any types declared among the arguments. */
6647 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6649 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6650 /* Flush the queue in case this parameter declaration has
6651 side-effects. */
6652 emit_queue ();
6656 /* Start the RTL for a new function, and set variables used for
6657 emitting RTL.
6658 SUBR is the FUNCTION_DECL node.
6659 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6660 the function's parameters, which must be run at any return statement. */
6662 void
6663 expand_function_start (subr, parms_have_cleanups)
6664 tree subr;
6665 int parms_have_cleanups;
6667 tree tem;
6668 rtx last_ptr = NULL_RTX;
6670 /* Make sure volatile mem refs aren't considered
6671 valid operands of arithmetic insns. */
6672 init_recog_no_volatile ();
6674 current_function_instrument_entry_exit
6675 = (flag_instrument_function_entry_exit
6676 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6678 current_function_profile
6679 = (profile_flag
6680 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6682 current_function_limit_stack
6683 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6685 /* If function gets a static chain arg, store it in the stack frame.
6686 Do this first, so it gets the first stack slot offset. */
6687 if (current_function_needs_context)
6689 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6691 /* Delay copying static chain if it is not a register to avoid
6692 conflicts with regs used for parameters. */
6693 if (! SMALL_REGISTER_CLASSES
6694 || GET_CODE (static_chain_incoming_rtx) == REG)
6695 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6698 /* If the parameters of this function need cleaning up, get a label
6699 for the beginning of the code which executes those cleanups. This must
6700 be done before doing anything with return_label. */
6701 if (parms_have_cleanups)
6702 cleanup_label = gen_label_rtx ();
6703 else
6704 cleanup_label = 0;
6706 /* Make the label for return statements to jump to. Do not special
6707 case machines with special return instructions -- they will be
6708 handled later during jump, ifcvt, or epilogue creation. */
6709 return_label = gen_label_rtx ();
6711 /* Initialize rtx used to return the value. */
6712 /* Do this before assign_parms so that we copy the struct value address
6713 before any library calls that assign parms might generate. */
6715 /* Decide whether to return the value in memory or in a register. */
6716 if (aggregate_value_p (DECL_RESULT (subr)))
6718 /* Returning something that won't go in a register. */
6719 rtx value_address = 0;
6721 #ifdef PCC_STATIC_STRUCT_RETURN
6722 if (current_function_returns_pcc_struct)
6724 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6725 value_address = assemble_static_space (size);
6727 else
6728 #endif
6730 /* Expect to be passed the address of a place to store the value.
6731 If it is passed as an argument, assign_parms will take care of
6732 it. */
6733 if (struct_value_incoming_rtx)
6735 value_address = gen_reg_rtx (Pmode);
6736 emit_move_insn (value_address, struct_value_incoming_rtx);
6739 if (value_address)
6741 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6742 set_mem_attributes (x, DECL_RESULT (subr), 1);
6743 SET_DECL_RTL (DECL_RESULT (subr), x);
6746 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6747 /* If return mode is void, this decl rtl should not be used. */
6748 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6749 else
6751 /* Compute the return value into a pseudo reg, which we will copy
6752 into the true return register after the cleanups are done. */
6754 /* In order to figure out what mode to use for the pseudo, we
6755 figure out what the mode of the eventual return register will
6756 actually be, and use that. */
6757 rtx hard_reg
6758 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6759 subr, 1);
6761 /* Structures that are returned in registers are not aggregate_value_p,
6762 so we may see a PARALLEL or a REG. */
6763 if (REG_P (hard_reg))
6764 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6765 else if (GET_CODE (hard_reg) == PARALLEL)
6766 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
6767 else
6768 abort ();
6770 /* Set DECL_REGISTER flag so that expand_function_end will copy the
6771 result to the real return register(s). */
6772 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6775 /* Initialize rtx for parameters and local variables.
6776 In some cases this requires emitting insns. */
6778 assign_parms (subr);
6780 /* Copy the static chain now if it wasn't a register. The delay is to
6781 avoid conflicts with the parameter passing registers. */
6783 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6784 if (GET_CODE (static_chain_incoming_rtx) != REG)
6785 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6787 /* The following was moved from init_function_start.
6788 The move is supposed to make sdb output more accurate. */
6789 /* Indicate the beginning of the function body,
6790 as opposed to parm setup. */
6791 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6793 if (GET_CODE (get_last_insn ()) != NOTE)
6794 emit_note (NULL, NOTE_INSN_DELETED);
6795 parm_birth_insn = get_last_insn ();
6797 context_display = 0;
6798 if (current_function_needs_context)
6800 /* Fetch static chain values for containing functions. */
6801 tem = decl_function_context (current_function_decl);
6802 /* Copy the static chain pointer into a pseudo. If we have
6803 small register classes, copy the value from memory if
6804 static_chain_incoming_rtx is a REG. */
6805 if (tem)
6807 /* If the static chain originally came in a register, put it back
6808 there, then move it out in the next insn. The reason for
6809 this peculiar code is to satisfy function integration. */
6810 if (SMALL_REGISTER_CLASSES
6811 && GET_CODE (static_chain_incoming_rtx) == REG)
6812 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6813 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6816 while (tem)
6818 tree rtlexp = make_node (RTL_EXPR);
6820 RTL_EXPR_RTL (rtlexp) = last_ptr;
6821 context_display = tree_cons (tem, rtlexp, context_display);
6822 tem = decl_function_context (tem);
6823 if (tem == 0)
6824 break;
6825 /* Chain thru stack frames, assuming pointer to next lexical frame
6826 is found at the place we always store it. */
6827 #ifdef FRAME_GROWS_DOWNWARD
6828 last_ptr = plus_constant (last_ptr,
6829 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6830 #endif
6831 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6832 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6833 last_ptr = copy_to_reg (last_ptr);
6835 /* If we are not optimizing, ensure that we know that this
6836 piece of context is live over the entire function. */
6837 if (! optimize)
6838 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6839 save_expr_regs);
6843 if (current_function_instrument_entry_exit)
6845 rtx fun = DECL_RTL (current_function_decl);
6846 if (GET_CODE (fun) == MEM)
6847 fun = XEXP (fun, 0);
6848 else
6849 abort ();
6850 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6851 2, fun, Pmode,
6852 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6853 0,
6854 hard_frame_pointer_rtx),
6855 Pmode);
6858 if (current_function_profile)
6860 #ifdef PROFILE_HOOK
6861 PROFILE_HOOK (current_function_funcdef_no);
6862 #endif
6865 /* After the display initializations is where the tail-recursion label
6866 should go, if we end up needing one. Ensure we have a NOTE here
6867 since some things (like trampolines) get placed before this. */
6868 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6870 /* Evaluate now the sizes of any types declared among the arguments. */
6871 expand_pending_sizes (nreverse (get_pending_sizes ()));
6873 /* Make sure there is a line number after the function entry setup code. */
6874 force_next_line_note ();
6877 /* Undo the effects of init_dummy_function_start. */
6878 void
6879 expand_dummy_function_end ()
6881 /* End any sequences that failed to be closed due to syntax errors. */
6882 while (in_sequence_p ())
6883 end_sequence ();
6885 /* Outside function body, can't compute type's actual size
6886 until next function's body starts. */
6888 free_after_parsing (cfun);
6889 free_after_compilation (cfun);
6890 cfun = 0;
6893 /* Call DOIT for each hard register used as a return value from
6894 the current function. */
6896 void
6897 diddle_return_value (doit, arg)
6898 void (*doit) PARAMS ((rtx, void *));
6899 void *arg;
6901 rtx outgoing = current_function_return_rtx;
6903 if (! outgoing)
6904 return;
6906 if (GET_CODE (outgoing) == REG)
6907 (*doit) (outgoing, arg);
6908 else if (GET_CODE (outgoing) == PARALLEL)
6910 int i;
6912 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6914 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6916 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6917 (*doit) (x, arg);
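/* Callbacks for use with diddle_return_value: emit a CLOBBER or a USE
   of a hard return-value register. */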
6922 static void
6923 do_clobber_return_reg (reg, arg)
6924 rtx reg;
6925 void *arg ATTRIBUTE_UNUSED;
6927 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6930 void
6931 clobber_return_register ()
6933 diddle_return_value (do_clobber_return_reg, NULL);
6935 /* In case we do use a pseudo to hold the return value, clobber it too. */
6936 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6938 tree decl_result = DECL_RESULT (current_function_decl);
6939 rtx decl_rtl = DECL_RTL (decl_result);
6940 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6942 do_clobber_return_reg (decl_rtl, NULL);
6947 static void
6948 do_use_return_reg (reg, arg)
6949 rtx reg;
6950 void *arg ATTRIBUTE_UNUSED;
6952 emit_insn (gen_rtx_USE (VOIDmode, reg));
6955 void
6956 use_return_register ()
6958 diddle_return_value (do_use_return_reg, NULL);
6961 static GTY(()) rtx initial_trampoline;
6963 /* Generate RTL for the end of the current function.
6964 FILENAME and LINE are the current position in the source file.
6966 It is up to language-specific callers to do cleanups for parameters--
6967 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6969 void
6970 expand_function_end (filename, line, end_bindings)
6971 const char *filename;
6972 int line;
6973 int end_bindings;
6975 tree link;
6976 rtx clobber_after;
6978 finish_expr_for_function ();
6980 /* If arg_pointer_save_area was referenced only from a nested
6981 function, we will not have initialized it yet. Do that now. */
6982 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6983 get_arg_pointer_save_area (cfun);
6985 #ifdef NON_SAVING_SETJMP
6986 /* Don't put any variables in registers if we call setjmp
6987 on a machine that fails to restore the registers. */
6988 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6990 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6991 setjmp_protect (DECL_INITIAL (current_function_decl));
6993 setjmp_protect_args ();
6995 #endif
6997 /* Initialize any trampolines required by this function. */
6998 for (link = trampoline_list; link; link = TREE_CHAIN (link))
7000 tree function = TREE_PURPOSE (link);
7001 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
7002 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
7003 #ifdef TRAMPOLINE_TEMPLATE
7004 rtx blktramp;
7005 #endif
7006 rtx seq;
7008 #ifdef TRAMPOLINE_TEMPLATE
7009 /* First make sure this compilation has a template for
7010 initializing trampolines. */
7011 if (initial_trampoline == 0)
7013 initial_trampoline
7014 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
7015 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
7017 #endif
7019 /* Generate insns to initialize the trampoline. */
7020 start_sequence ();
7021 tramp = round_trampoline_addr (XEXP (tramp, 0));
7022 #ifdef TRAMPOLINE_TEMPLATE
7023 blktramp = replace_equiv_address (initial_trampoline, tramp);
7024 emit_block_move (blktramp, initial_trampoline,
7025 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
7026 #endif
7027 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
7028 seq = get_insns ();
7029 end_sequence ();
7031 /* Put those insns at entry to the containing function (this one). */
7032 emit_insn_before (seq, tail_recursion_reentry);
7035 /* If we are doing stack checking and this function makes calls,
7036 do a stack probe at the start of the function to ensure we have enough
7037 space for another stack frame. */
7038 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
7040 rtx insn, seq;
7042 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7043 if (GET_CODE (insn) == CALL_INSN)
7045 start_sequence ();
7046 probe_stack_range (STACK_CHECK_PROTECT,
7047 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
7048 seq = get_insns ();
7049 end_sequence ();
7050 emit_insn_before (seq, tail_recursion_reentry);
7051 break;
7055 /* Possibly warn about unused parameters. */
7056 if (warn_unused_parameter)
7058 tree decl;
7060 for (decl = DECL_ARGUMENTS (current_function_decl);
7061 decl; decl = TREE_CHAIN (decl))
7062 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
7063 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
7064 warning_with_decl (decl, "unused parameter `%s'");
7067 /* Delete handlers for nonlocal gotos if nothing uses them. */
7068 if (nonlocal_goto_handler_slots != 0
7069 && ! current_function_has_nonlocal_label)
7070 delete_handlers ();
7072 /* End any sequences that failed to be closed due to syntax errors. */
7073 while (in_sequence_p ())
7074 end_sequence ();
7076 /* Outside function body, can't compute type's actual size
7077 until next function's body starts. */
7078 immediate_size_expand--;
7080 clear_pending_stack_adjust ();
7081 do_pending_stack_adjust ();
7083 /* Mark the end of the function body.
7084 If control reaches this insn, the function can drop through
7085 without returning a value. */
7086 emit_note (NULL, NOTE_INSN_FUNCTION_END);
7088 /* Must mark the last line number note in the function, so that the test
7089 coverage code can avoid counting the last line twice. This just tells
7090 the code to ignore the immediately following line note, since there
7091 already exists a copy of this note somewhere above. This line number
7092 note is still needed for debugging though, so we can't delete it. */
7093 if (flag_test_coverage)
7094 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
7096 /* Output a linenumber for the end of the function.
7097 SDB depends on this. */
7098 emit_line_note_force (filename, line);
7100 /* Before the return label (if any), clobber the return
7101 registers so that they are not propagated live to the rest of
7102 the function. This can only happen with functions that drop
7103 through; if there had been a return statement, there would
7104 have either been a return rtx, or a jump to the return label.
7106 We delay actual code generation until after current_function_return_rtx
7107 is computed. */
7108 clobber_after = get_last_insn ();
7110 /* Output the label for the actual return from the function,
7111 if one is expected. This happens either because a function epilogue
7112 is used instead of a return instruction, or because a return was done
7113 with a goto in order to run local cleanups, or because of pcc-style
7114 structure returning. */
7115 if (return_label)
7116 emit_label (return_label);
7118 /* C++ uses this. */
7119 if (end_bindings)
7120 expand_end_bindings (0, 0, 0);
7122 if (current_function_instrument_entry_exit)
7124 rtx fun = DECL_RTL (current_function_decl);
7125 if (GET_CODE (fun) == MEM)
7126 fun = XEXP (fun, 0);
7127 else
7128 abort ();
7129 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
7130 2, fun, Pmode,
7131 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
7132 0,
7133 hard_frame_pointer_rtx),
7134 Pmode);
7137 /* Let except.c know where it should emit the call to unregister
7138 the function context for sjlj exceptions. */
7139 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
7140 sjlj_emit_function_exit_after (get_last_insn ());
7142 /* If we had calls to alloca, and this machine needs
7143 an accurate stack pointer to exit the function,
7144 insert some code to save and restore the stack pointer. */
7145 #ifdef EXIT_IGNORE_STACK
7146 if (! EXIT_IGNORE_STACK)
7147 #endif
7148 if (current_function_calls_alloca)
7150 rtx tem = 0;
7152 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
7153 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
7156 /* If scalar return value was computed in a pseudo-reg, or was a named
7157 return value that got dumped to the stack, copy that to the hard
7158 return register. */
7159 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
7161 tree decl_result = DECL_RESULT (current_function_decl);
7162 rtx decl_rtl = DECL_RTL (decl_result);
7164 if (REG_P (decl_rtl)
7165 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
7166 : DECL_REGISTER (decl_result))
7168 rtx real_decl_rtl = current_function_return_rtx;
7170 /* This should be set in assign_parms. */
7171 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
7172 abort ();
7174 /* If this is a BLKmode structure being returned in registers,
7175 then use the mode computed in expand_return. Note that if
7176 decl_rtl is memory, then its mode may have been changed,
7177 but that current_function_return_rtx has not. */
7178 if (GET_MODE (real_decl_rtl) == BLKmode)
7179 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
7181 /* If a named return value dumped decl_result to memory, then
7182 we may need to re-do the PROMOTE_MODE signed/unsigned
7183 extension. */
7184 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
7186 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
7188 #ifdef PROMOTE_FUNCTION_RETURN
7189 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
7190 &unsignedp, 1);
7191 #endif
7193 convert_move (real_decl_rtl, decl_rtl, unsignedp);
7195 else if (GET_CODE (real_decl_rtl) == PARALLEL)
7197 /* If expand_function_start has created a PARALLEL for decl_rtl,
7198 move the result to the real return registers. Otherwise, do
7199 a group load from decl_rtl for a named return. */
7200 if (GET_CODE (decl_rtl) == PARALLEL)
7201 emit_group_move (real_decl_rtl, decl_rtl);
7202 else
7203 emit_group_load (real_decl_rtl, decl_rtl,
7204 int_size_in_bytes (TREE_TYPE (decl_result)));
7206 else
7207 emit_move_insn (real_decl_rtl, decl_rtl);
7211 /* If returning a structure, arrange to return the address of the value
7212 in a place where debuggers expect to find it.
7214 If returning a structure PCC style,
7215 the caller also depends on this value.
7216 And current_function_returns_pcc_struct is not necessarily set. */
7217 if (current_function_returns_struct
7218 || current_function_returns_pcc_struct)
7220 rtx value_address
7221 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7222 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7223 #ifdef FUNCTION_OUTGOING_VALUE
7224 rtx outgoing
7225 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7226 current_function_decl);
7227 #else
7228 rtx outgoing
7229 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7230 #endif
7232 /* Mark this as a function return value so integrate will delete the
7233 assignment and USE below when inlining this function. */
7234 REG_FUNCTION_VALUE_P (outgoing) = 1;
7236 #ifdef POINTERS_EXTEND_UNSIGNED
7237 /* The address may be ptr_mode and OUTGOING may be Pmode. */
7238 if (GET_MODE (outgoing) != GET_MODE (value_address))
7239 value_address = convert_memory_address (GET_MODE (outgoing),
7240 value_address);
7241 #endif
7243 emit_move_insn (outgoing, value_address);
7245 /* Show return register used to hold result (in this case the address
7246 of the result). */
7247 current_function_return_rtx = outgoing;
7250 /* If this is an implementation of throw, do what's necessary to
7251 communicate between __builtin_eh_return and the epilogue. */
7252 expand_eh_return ();
7254 /* Emit the actual code to clobber return register. */
7256 rtx seq, after;
7258 start_sequence ();
7259 clobber_return_register ();
7260 seq = get_insns ();
7261 end_sequence ();
7263 after = emit_insn_after (seq, clobber_after);
7265 if (clobber_after != after)
7266 cfun->x_clobber_return_insn = after;
7269 /* ??? This should no longer be necessary since stupid is no longer with
7270 us, but there are some parts of the compiler (eg reload_combine, and
7271 sh mach_dep_reorg) that still try and compute their own lifetime info
7272 instead of using the general framework. */
7273 use_return_register ();
7275 /* Fix up any gotos that jumped out to the outermost
7276 binding level of the function.
7277 Must follow emitting RETURN_LABEL. */
7279 /* If you have any cleanups to do at this point,
7280 and they need to create temporary variables,
7281 then you will lose. */
7282 expand_fixups (get_insns ());
7285 rtx
7286 get_arg_pointer_save_area (f)
7287 struct function *f;
7289 rtx ret = f->x_arg_pointer_save_area;
7291 if (! ret)
7293 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7294 f->x_arg_pointer_save_area = ret;
7297 if (f == cfun && ! f->arg_pointer_save_area_init)
7299 rtx seq;
7301 /* Save the arg pointer at the beginning of the function. The
7302 generated stack slot may not be a valid memory address, so we
7303 have to check it and fix it if necessary. */
7304 start_sequence ();
7305 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7306 seq = get_insns ();
7307 end_sequence ();
7309 push_topmost_sequence ();
7310 emit_insn_after (seq, get_insns ());
7311 pop_topmost_sequence ();
7314 return ret;
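
/* A sketch of how a consumer would use the slot returned above
   (illustration only; the actual call sites live outside this file,
   e.g. in the nonlocal-goto and builtin-setjmp receivers):

	rtx slot = get_arg_pointer_save_area (cfun);
	emit_move_insn (virtual_incoming_args_rtx, copy_to_reg (slot));

   i.e. the saved arg pointer is reloaded at a point where the original
   register may have been clobbered.  */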
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (insns, vecp)
     rtx insns;
     varray_type *vecp;
{
  int i, len;
  rtx tmp;

  tmp = insns;
  len = 0;
  while (tmp != NULL_RTX)
    {
      len++;
      tmp = NEXT_INSN (tmp);
    }

  i = VARRAY_SIZE (*vecp);
  VARRAY_GROW (*vecp, i + len);
  tmp = insns;
  while (tmp != NULL_RTX)
    {
      VARRAY_INT (*vecp, i) = INSN_UID (tmp);
      i++;
      tmp = NEXT_INSN (tmp);
    }
}
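
/* An illustrative sketch of the usage pattern (the real call sites are
   in thread_prologue_and_epilogue_insns below):

	start_sequence ();
	seq = gen_prologue ();
	emit_insn (seq);
	record_insns (seq, &prologue);
	seq = get_insns ();
	end_sequence ();

   after which contains () can recognize those insns by UID even once
   later passes have moved them around.  */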
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (insn, vec)
     rtx insn;
     varray_type vec;
{
  int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
	    count++;
      return count;
    }
  else
    {
      for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	if (INSN_UID (insn) == VARRAY_INT (vec, j))
	  return 1;
    }
  return 0;
}
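
/* For example (illustration only): after delayed-branch scheduling an
   epilogue insn can sit in the delay slot of the return jump, so INSN
   may be a SEQUENCE along the lines of

	(insn (sequence [(jump_insn ... (return))
			 (insn ... epilogue restore)]))

   in which case contains () counts every member of the SEQUENCE whose
   UID was recorded, rather than looking only at the outer insn.  */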
int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (contains (insn, prologue))
    return 1;
  if (contains (insn, epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (insn)
     rtx insn;
{
  if (sibcall_epilogue)
    return contains (insn, sibcall_epilogue);
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (bb, line_note)
     basic_block bb;
     rtx line_note;
{
  emit_jump_insn_after (gen_return (), bb->end);
  if (line_note)
    emit_line_note_after (NOTE_SOURCE_FILE (line_note),
			  NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
}
#endif /* HAVE_return */
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)

/* These functions convert the epilogue into a variant that does not modify the
   stack pointer.  This is used in cases where a function returns an object
   whose size is not known until it is computed.  The called function leaves the
   object on the stack, leaves the stack depressed, and returns a pointer to
   the object.

   What we need to do is track all modifications and references to the stack
   pointer, deleting the modifications and changing the references to point to
   the location the stack pointer would have pointed to had the modifications
   taken place.

   These functions need to be portable so we need to make as few assumptions
   about the epilogue as we can.  However, the epilogue basically contains
   three things: instructions to reset the stack pointer, instructions to
   reload registers, possibly including the frame pointer, and an
   instruction to return to the caller.

   If we can't be sure of what a relevant epilogue insn is doing, we abort.
   We also make no attempt to validate the insns we make since if they are
   invalid, we probably can't do anything valid.  The intent is that these
   routines get "smarter" as more and more machines start to use them and
   they try operating on different epilogues.

   We use the following structure to track what the part of the epilogue that
   we've already processed has done.  We keep two copies of the SP equivalence,
   one for use during the insn we are processing and one for use in the next
   insn.  The difference is because one part of a PARALLEL may adjust SP
   and the other may use it.  */

struct epi_info
{
  rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
  HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
  rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
  HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
  rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
				   should be set to once we no longer need
				   its value.  */
};
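
/* A concrete (purely illustrative) reading of the two-copy scheme:
   while scanning an insn such as

	(set sp (plus fp 16))

   the insn is still processed against the old equivalence
   { sp_equiv_reg == sp, sp_offset == 0 }, and the pending equivalence
   { new_sp_equiv_reg == fp, new_sp_offset == 16 } only takes effect
   once the whole insn, including any sibling SETs in a PARALLEL, has
   been handled.  */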
static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
static void emit_equiv_load PARAMS ((struct epi_info *));

/* Modify INSN, a list of one or more insns that is part of the epilogue, to
   make no modifications to the stack pointer.  Return the new list of insns.  */

static rtx
keep_stack_depressed (insns)
     rtx insns;
{
  int j;
  struct epi_info info;
  rtx insn, next;

  /* If the epilogue is just a single instruction, it must be OK as is.  */

  if (NEXT_INSN (insns) == NULL_RTX)
    return insns;
  /* Otherwise, start a sequence, initialize the information we have, and
     process all the insns we were given.  */
  start_sequence ();

  info.sp_equiv_reg = stack_pointer_rtx;
  info.sp_offset = 0;
  info.equiv_reg_src = 0;

  insn = insns;
  next = NULL_RTX;
  while (insn != NULL_RTX)
    {
      next = NEXT_INSN (insn);

      if (!INSN_P (insn))
	{
	  add_insn (insn);
	  insn = next;
	  continue;
	}

      /* If this insn references the register that SP is equivalent to and
	 we have a pending load to that register, we must force out the load
	 first and then indicate we no longer know what SP's equivalent is.  */
      if (info.equiv_reg_src != 0
	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
	{
	  emit_equiv_load (&info);
	  info.sp_equiv_reg = 0;
	}

      info.new_sp_equiv_reg = info.sp_equiv_reg;
      info.new_sp_offset = info.sp_offset;

      /* If this is a (RETURN) and the return address is on the stack,
	 update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	{
	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
	  rtx base = 0;
	  HOST_WIDE_INT offset = 0;
	  rtx jump_insn, jump_set;

	  /* If the return address is in a register, we can emit the insn
	     unchanged.  Otherwise, it must be a MEM and we see what the
	     base register and offset are.  In any case, we have to emit any
	     pending load to the equivalent reg of SP, if any.  */
	  if (GET_CODE (retaddr) == REG)
	    {
	      emit_equiv_load (&info);
	      add_insn (insn);
	      insn = next;
	      continue;
	    }
	  else if (GET_CODE (retaddr) == MEM
		   && GET_CODE (XEXP (retaddr, 0)) == REG)
	    base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
	  else if (GET_CODE (retaddr) == MEM
		   && GET_CODE (XEXP (retaddr, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
		   && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
	    {
	      base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
	      offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
	    }
	  else
	    abort ();

	  /* If the base of the location containing the return pointer
	     is SP, we must update it with the replacement address.  Otherwise,
	     just build the necessary MEM.  */
	  retaddr = plus_constant (base, offset);
	  if (base == stack_pointer_rtx)
	    retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
					    plus_constant (info.sp_equiv_reg,
							   info.sp_offset));

	  retaddr = gen_rtx_MEM (Pmode, retaddr);

	  /* If there is a pending load to the equivalent register for SP
	     and we reference that register, we must load our address into
	     a scratch register and then do that load.  */
	  if (info.equiv_reg_src
	      && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
	    {
	      unsigned int regno;
	      rtx reg;

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (HARD_REGNO_MODE_OK (regno, Pmode)
		    && !fixed_regs[regno]
		    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
		    && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
					 regno)
		    && !refers_to_regno_p (regno,
					   regno + HARD_REGNO_NREGS (regno,
								     Pmode),
					   info.equiv_reg_src, NULL))
		  break;

	      if (regno == FIRST_PSEUDO_REGISTER)
		abort ();

	      reg = gen_rtx_REG (Pmode, regno);
	      emit_move_insn (reg, retaddr);
	      retaddr = reg;
	    }

	  emit_equiv_load (&info);
	  jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

	  /* Show that the SET in the above insn is a RETURN.  */
	  jump_set = single_set (jump_insn);
	  if (jump_set == 0)
	    abort ();
	  else
	    SET_IS_RETURN_P (jump_set) = 1;
	}
      /* If SP is not mentioned in the pattern and its equivalent register, if
	 any, is not modified, just emit it.  Otherwise, if neither is set,
	 replace the reference to SP and emit the insn.  If none of those are
	 true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	add_insn (insn);
      else if (! reg_set_p (stack_pointer_rtx, insn)
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	{
	  if (! validate_replace_rtx (stack_pointer_rtx,
				      plus_constant (info.sp_equiv_reg,
						     info.sp_offset),
				      insn))
	    abort ();

	  add_insn (insn);
	}
      else if (GET_CODE (PATTERN (insn)) == SET)
	handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	{
	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
	}
      else
	add_insn (insn);

      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;

      insn = next;
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
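
/* A minimal illustration (hypothetical machine whose return address is
   in a register, so the RETURN insn needs no rewriting): the epilogue

	(set sp (plus sp 16))
	(set (reg r3) (mem (plus sp 4)))
	(return)

   comes out of keep_stack_depressed () as

	(set (reg r3) (mem (plus sp 20)))
	(return)

   The SP adjustment is recorded in INFO rather than emitted, and the
   later reference to SP is rewritten against the recorded equivalence
   sp + 16, so SP itself is never modified.  */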
/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (set, p)
     rtx set;
     struct epi_info *p;
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
	abort ();

      if (GET_CODE (SET_SRC (set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}

      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
	abort ();

      return;
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
	  || p->equiv_reg_src != 0)
	abort ();
      else
	p->equiv_reg_src
	  = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				  plus_constant (p->sp_equiv_reg,
						 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn with its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set),
					     stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}
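
/* For instance (illustration only), with the running state
   { sp_equiv_reg == fp, sp_offset == 16 }, a PARALLEL epilogue insn

	(parallel [(set sp (plus sp 8))
		   (set (mem (plus sp 4)) (reg r3))])

   is processed one SET at a time: the first SET only updates the
   pending state to { fp, 24 } and emits nothing, while the second is
   rewritten against the *old* equivalence and emitted as

	(set (mem (plus fp 20)) (reg r3))

   which is exactly why struct epi_info keeps the old and new SP
   equivalences separately.  */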
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (p)
     struct epi_info *p;
{
  if (p->equiv_reg_src != 0)
    emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);

  p->equiv_reg_src = 0;
}
#endif
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      seq = get_insns ();
      end_sequence ();

      /* Can't deal with multiple successors of the entry block
	 at the moment.  A function should always have at least one
	 entry point.  */
      if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
	abort ();

      insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
      inserted = 1;
    }
#endif
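
  /* A sketch of what the edge insertion above amounts to (illustration
     only): rather than emitting the prologue directly into the first
     basic block, the sequence is queued on the single entry edge via

	insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);

     and only materialized later by commit_edge_insertions (), which
     splits the edge into a new basic block when that is needed to keep
     the CFG consistent.  */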
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
	 then by definition we don't need a full epilogue.  Examine
	 the block that falls through to EXIT.  If it does not
	 contain any code, examine its predecessors and try to
	 emit (conditional) return instructions.  */

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = last->end;
      while (label && GET_CODE (label) != CODE_LABEL)
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (last->head == label && GET_CODE (label) == CODE_LABEL)
	{
	  rtx epilogue_line_note = NULL_RTX;

	  /* Locate the line number associated with the closing brace,
	     if we can find one.  */
	  for (seq = get_last_insn ();
	       seq && ! active_insn_p (seq);
	       seq = PREV_INSN (seq))
	    if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
	      {
		epilogue_line_note = seq;
		break;
	      }

	  for (e = last->pred; e; e = e_next)
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      e_next = e->pred_next;
	      if (bb == ENTRY_BLOCK_PTR)
		continue;

	      jump = bb->end;
	      if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
		continue;

	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb, epilogue_line_note);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  if (! redirect_jump (jump, 0, 0))
		    continue;

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (bb->succ->succ_next == NULL)
		    continue;
		}
	      else
		continue;

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (last->end);
	  emit_return_into_block (last, epilogue_line_note);
	  epilogue_end = last->end;
	  last->succ->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
	 due to RETURN instructions, but those don't need epilogues.
	 There really shouldn't be a mixture -- either all should have
	 been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;

      start_sequence ();
      epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
	 it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
	seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
#endif
epilogue_done:

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = bb->end;
      rtx i;
      rtx newinsn;

      if (GET_CODE (insn) != CALL_INSN
	  || ! SIBLING_CALL_P (insn))
	continue;

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  Do this before we
	 actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);
    }
#endif
#ifdef HAVE_prologue
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note after the prologue.  Which means (1) that if
	 there are line number notes before where we inserted the
	 prologue we should move them, and (2) we should generate a
	 note before the end of the first basic block, if there isn't
	 one already there.

	 ??? This behavior is completely broken when dealing with
	 multiple entry functions.  We simply place the note always
	 into the first basic block and let alternate entry points
	 be missed.  */

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  */
	      if (prev == NULL)
		break;
	      reorder_insns (insn, insn, prologue_end);
	    }
	}

      /* Find the last line number note in the first block.  */
      for (insn = ENTRY_BLOCK_PTR->next_bb->end;
	   insn != prologue_end && insn;
	   insn = PREV_INSN (insn))
	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	  break;

      /* If we didn't find one, make a copy of the first line number
	 we run across.  */
      if (! insn)
	{
	  for (insn = next_active_insn (prologue_end);
	       insn;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	      {
		emit_line_note_after (NOTE_SOURCE_FILE (insn),
				      NOTE_LINE_NUMBER (insn),
				      prologue_end);
		break;
	      }
	}
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the prologue-end note if we haven't already, and
	     move it to just after the last prologue insn.  */
	  if (note == 0)
	    {
	      for (note = last; (note = NEXT_INSN (note));)
		if (GET_CODE (note) == NOTE
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		  break;
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (GET_CODE (last) == CODE_LABEL)
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if (contains (insn, epilogue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the epilogue-begin note if we haven't already, and
	     move it to just before the first epilogue insn.  */
	  if (note == 0)
	    {
	      for (note = insn; (note = PREV_INSN (note));)
		if (GET_CODE (note) == NOTE
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		  break;
	    }

	  if (PREV_INSN (last) != note)
	    reorder_insns (note, note, PREV_INSN (last));
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}

#include "gt-function.h"