/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
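
/* For illustration only (not part of the original file): the driver and
   front ends end up calling into this file roughly in this order for
   each function, where FNDECL is the FUNCTION_DECL being compiled:

       init_function_start (fndecl, input_filename, lineno);
       expand_function_start (fndecl, 0);
       ... expand the statements of the function body ...
       expand_function_end (input_filename, lineno, 0);

   The exact arguments vary between front ends and releases; this is a
   sketch of the protocol, not a verbatim call sequence.  */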

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
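
/* For illustration only (not part of the original file): with ALIGN a
   power of two these expand to pure bit operations, which round in a
   well-defined direction even for negative VALUEs, where `/' and `%'
   would round toward zero:

       FLOOR_ROUND (13, 8)  ==  8      FLOOR_ROUND (-13, 8) == -16
       CEIL_ROUND  (13, 8)  == 16      CEIL_ROUND  (-13, 8) ==  -8

   This matters below because frame offsets are negative when the frame
   grows downward.  */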

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Nonzero if at least one trampoline has been created.  */
int trampolines_created;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) PARAMS ((void));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
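
/* For illustration only (not part of the original file): a typical
   client elsewhere in the compiler brackets the expansion of one
   statement like this, assuming it needs a DImode scratch slot:

       push_temp_slots ();
       slot = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
       ... emit insns that use SLOT ...
       free_temp_slots ();
       pop_temp_slots ();

   Slots freed this way stay on the temp_slots list and become
   candidates for reuse by later assign_stack_temp calls.  */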

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
                                         int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
                                        enum machine_mode, enum machine_mode,
                                        int, unsigned int, int, htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
                                             enum machine_mode, htab_t));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
                                    htab_t));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
                                          int, int, rtx));
static void fixup_var_refs_insns_with_hash
  PARAMS ((htab_t, rtx, enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
                                         int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
                                      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
                                             int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
                                          struct args_size *));
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
                               tree));
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int, int, htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static hashval_t insns_for_mem_hash PARAMS ((const void *));
static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
static void instantiate_virtual_regs_lossage PARAMS ((rtx));
static tree split_complex_args (tree);

/* Pointer to chain of `struct function' for containing functions.  */
static GTY(()) struct function *outer_function_chain;

/* List of insns that were postponed by purge_addressof_1.  */
static rtx postponed_insns;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  (*lang_hooks.function.enter_nested) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  (*lang_hooks.function.leave_nested) (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
                    p->fixup_var_refs_queue->promoted_mode,
                    p->fixup_var_refs_queue->unsignedp,
                    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        fixup_var_refs (queue->modified, queue->promoted_mode,
                        queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  (*lang_hooks.function.final) (f);
  f->stmt = NULL;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->computed_goto_common_label = NULL;
  f->computed_goto_common_reg = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = (*lang_hooks.types.type_for_mode) (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore any alignment request we cannot honor given the expected
     alignment of the stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;
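
  /* For illustration only (not part of the original file): if
     STARTING_FRAME_OFFSET is 8 and PREFERRED_STACK_BOUNDARY is 128
     bits, then frame_alignment is 16, frame_off is 8 and frame_phase
     is 8.  The rounding below is done relative to this phase, so that
     frame_offset + STARTING_FRAME_OFFSET ends up a multiple of the
     requested alignment.  */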

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
        = (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
           + frame_phase);
#else
      function->x_frame_offset
        = (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
           + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
                                 && TYPE_READONLY (type));
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}
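
/* For illustration only (not part of the original file): the KEEP
   values map onto the slot-level variables like this, assuming the
   function above is called during statement expansion:

       keep == 0 or 1:  level = temp_slot_level; freed by free_temp_slots
                        only when keep == 0
       keep == 2:       level = target_temp_slot_level (CLEANUP_POINT_EXPR
                        lifetime)
       keep == 3:       level = var_temp_slot_level (block-scope variable,
                        e.g. a SAVE_EXPR)  */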

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate space of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error_with_decl (decl, "size of variable `%s' is too large");
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
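
/* For illustration only (not part of the original file): two free
   BLKmode slots with (base_offset, full_size) of (0, 16) and (16, 16)
   are adjacent, so the loop above merges them into one slot with
   (base_offset, full_size) == (0, 32), which a later 32-byte
   assign_stack_temp request can reuse instead of growing the frame.  */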

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack
   slot.  This is done when an address-reference to the variable is
   seen.  If RESCAN is true, all previously emitted instructions are
   examined and modified to handle the fact that DECL is now
   addressable.  */

void
put_var_into_stack (decl, rescan)
     tree decl;
     int rescan;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
               || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
         ? SAVE_EXPR_RTL (decl)
         : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl, rescan);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
                            decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fixup references to the parts only after we fixup references
         to the whole CONCAT, lest we do double fixups for the latter
         references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating
         already-computed alias sets.  Here we want to re-generate them.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp && rescan)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;
}
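
/* For illustration only (not part of the original file): compiling

       int f (void) { int x = 0; int *p = &x; return *p; }

   X is first given a pseudo register; when `&x' is seen, the front end
   ends up calling put_var_into_stack for X.  At -O0 the pseudo is
   moved to a stack slot immediately, while at higher optimization
   levels an ADDRESSOF may be generated instead and possibly resolved
   later by purge_addressof.  Either way, insns already emitted against
   the pseudo must be rewritten, which is what the fixup_var_refs
   machinery below does.  */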

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     htab_t ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}

/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     htab_t ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}

static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     htab_t ht;
     rtx may_share;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
                                      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0, may_share);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                may_share);
          end_sequence ();
        }
    }
}

/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
   and X is some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx may_share;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the Normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                        may_share);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }
      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
                             may_share);

      insn = next;
    }
}

/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
     htab_t ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
{
  struct insns_for_mem_entry tmp;
  struct insns_for_mem_entry *ime;
  rtx insn_list;

  tmp.key = var;
  ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
                           unsignedp, 1, may_share);
}
1703 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1704 the insn under examination, VAR is the variable to fix up
1705 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1706 TOPLEVEL is nonzero if this is the main insn chain for this
1707 function. */
1709 static void
1710 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
1711 rtx insn;
1712 rtx var;
1713 enum machine_mode promoted_mode;
1714 int unsignedp;
1715 int toplevel;
1716 rtx no_share;
1718 rtx call_dest = 0;
1719 rtx set, prev, prev_set;
1720 rtx note;
1722 /* Remember the notes in case we delete the insn. */
1723 note = REG_NOTES (insn);
1725 /* If this is a CLOBBER of VAR, delete it.
1727 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1728 and REG_RETVAL notes too. */
1729 if (GET_CODE (PATTERN (insn)) == CLOBBER
1730 && (XEXP (PATTERN (insn), 0) == var
1731 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1732 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1733 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1735 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1736 /* The REG_LIBCALL note will go away since we are going to
1737 turn INSN into a NOTE, so just delete the
1738 corresponding REG_RETVAL note. */
1739 remove_note (XEXP (note, 0),
1740 find_reg_note (XEXP (note, 0), REG_RETVAL,
1741 NULL_RTX));
1743 delete_insn (insn);
1746 /* The insn to load VAR from a home in the arglist
1747 is now a no-op. When we see it, just delete it.
1748 Similarly if this is storing VAR from a register from which
1749 it was loaded in the previous insn. This will occur
1750 when an ADDRESSOF was made for an arglist slot. */
1751 else if (toplevel
1752 && (set = single_set (insn)) != 0
1753 && SET_DEST (set) == var
1754 /* If this represents the result of an insn group,
1755 don't delete the insn. */
1756 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1757 && (rtx_equal_p (SET_SRC (set), var)
1758 || (GET_CODE (SET_SRC (set)) == REG
1759 && (prev = prev_nonnote_insn (insn)) != 0
1760 && (prev_set = single_set (prev)) != 0
1761 && SET_DEST (prev_set) == SET_SRC (set)
1762 && rtx_equal_p (SET_SRC (prev_set), var))))
1764 delete_insn (insn);
1766 else
1768 struct fixup_replacement *replacements = 0;
1769 rtx next_insn = NEXT_INSN (insn);
1771 if (SMALL_REGISTER_CLASSES)
1773 /* If the insn that copies the results of a CALL_INSN
1774 into a pseudo now references VAR, we have to use an
1775 intermediate pseudo since we want the life of the
1776 return value register to be only a single insn.
1778 If we don't use an intermediate pseudo, such things as
1779 address computations to make the address of VAR valid
1780 if it is not can be placed between the CALL_INSN and INSN.
1782 To make sure this doesn't happen, we record the destination
1783 of the CALL_INSN and see if the next insn uses both that
1784 and VAR. */
1786 if (call_dest != 0 && GET_CODE (insn) == INSN
1787 && reg_mentioned_p (var, PATTERN (insn))
1788 && reg_mentioned_p (call_dest, PATTERN (insn)))
1790 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1792 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1794 PATTERN (insn) = replace_rtx (PATTERN (insn),
1795 call_dest, temp);
1798 if (GET_CODE (insn) == CALL_INSN
1799 && GET_CODE (PATTERN (insn)) == SET)
1800 call_dest = SET_DEST (PATTERN (insn));
1801 else if (GET_CODE (insn) == CALL_INSN
1802 && GET_CODE (PATTERN (insn)) == PARALLEL
1803 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1804 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1805 else
1806 call_dest = 0;
1809 /* See if we have to do anything to INSN now that VAR is in
1810 memory. If it needs to be loaded into a pseudo, use a single
1811 pseudo for the entire insn in case there is a MATCH_DUP
1812 between two operands. We pass a pointer to the head of
1813 a list of struct fixup_replacements. If fixup_var_refs_1
1814 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1815 it will record them in this list.
1817 If it allocated a pseudo for any replacement, we copy into
1818 it here. */
1820 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1821 &replacements, no_share);
1823 /* If this is last_parm_insn, and any instructions were output
1824 after it to fix it up, then we must set last_parm_insn to
1825 the last such instruction emitted. */
1826 if (insn == last_parm_insn)
1827 last_parm_insn = PREV_INSN (next_insn);
1829 while (replacements)
1831 struct fixup_replacement *next;
1833 if (GET_CODE (replacements->new) == REG)
1835 rtx insert_before;
1836 rtx seq;
1838 /* OLD might be a (subreg (mem)). */
1839 if (GET_CODE (replacements->old) == SUBREG)
1840 replacements->old
1841 = fixup_memory_subreg (replacements->old, insn,
1842 promoted_mode, 0);
1843 else
1844 replacements->old
1845 = fixup_stack_1 (replacements->old, insn);
1847 insert_before = insn;
1849 /* If we are changing the mode, do a conversion.
1850 This might be wasteful, but combine.c will
1851 eliminate much of the waste. */
1853 if (GET_MODE (replacements->new)
1854 != GET_MODE (replacements->old))
1856 start_sequence ();
1857 convert_move (replacements->new,
1858 replacements->old, unsignedp);
1859 seq = get_insns ();
1860 end_sequence ();
1862 else
1863 seq = gen_move_insn (replacements->new,
1864 replacements->old);
1866 emit_insn_before (seq, insert_before);
1869 next = replacements->next;
1870 free (replacements);
1871 replacements = next;
1875 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1876 But don't touch other insns referred to by reg-notes;
1877 we will get them elsewhere. */
1878 while (note)
1880 if (GET_CODE (note) != INSN_LIST)
1881 XEXP (note, 0)
1882 = walk_fixup_memory_subreg (XEXP (note, 0), insn,
1883 promoted_mode, 1);
1884 note = XEXP (note, 1);
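
/* Editorial sketch, not part of the original sources: the replacement
   bookkeeping used above behaves like a small memo table keyed on the
   original rtx, so every identical MEM within one insn is mapped to the
   same pseudo and MATCH_DUP constraints stay satisfied.  A minimal
   stand-alone model, with hypothetical names:  */
#if 0
struct repl { rtx old, new; struct repl *next; };

static rtx
repl_lookup_or_add (head, old, mode)
     struct repl **head;
     rtx old;
     enum machine_mode mode;
{
  struct repl *r;

  for (r = *head; r; r = r->next)
    if (rtx_equal_p (r->old, old))
      return r->new;		/* Reuse, so duplicates stay identical.  */

  r = (struct repl *) xmalloc (sizeof *r);
  r->old = old, r->new = gen_reg_rtx (mode), r->next = *head;
  *head = r;
  return r->new;
}
#endif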
1888 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1889 See if the rtx expression at *LOC in INSN needs to be changed.
1891 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1892 contain a list of original rtx's and replacements. If we find that we need
1893 to modify this insn by replacing a memory reference with a pseudo or by
1894 making a new MEM to implement a SUBREG, we consult that list to see if
1895 we have already chosen a replacement. If none has already been allocated,
1896 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1897 or the SUBREG, as appropriate, to the pseudo. */
1899 static void
1900 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1901 rtx var;
1902 enum machine_mode promoted_mode;
1903 rtx *loc;
1904 rtx insn;
1905 struct fixup_replacement **replacements;
1906 rtx no_share;
1908 int i;
1909 rtx x = *loc;
1910 RTX_CODE code = GET_CODE (x);
1911 const char *fmt;
1912 rtx tem, tem1;
1913 struct fixup_replacement *replacement;
1915 switch (code)
1917 case ADDRESSOF:
1918 if (XEXP (x, 0) == var)
1920 /* Prevent sharing of rtl that might lose. */
1921 rtx sub = copy_rtx (XEXP (var, 0));
1923 if (! validate_change (insn, loc, sub, 0))
1925 rtx y = gen_reg_rtx (GET_MODE (sub));
1926 rtx seq, new_insn;
1928 /* We should be able to replace with a register or all is lost.
1929 Note that we can't use validate_change to verify this, since
1930 we're not taking care of replacing all dups simultaneously. */
1931 if (! validate_replace_rtx (*loc, y, insn))
1932 abort ();
1934 /* Careful! First try to recognize a direct move of the
1935 value, mimicking how things are done in gen_reload wrt
1936 PLUS. Consider what happens when insn is a conditional
1937 move instruction and addsi3 clobbers flags. */
1939 start_sequence ();
1940 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1941 seq = get_insns ();
1942 end_sequence ();
1944 if (recog_memoized (new_insn) < 0)
1946 /* That failed. Fall back on force_operand and hope. */
1948 start_sequence ();
1949 sub = force_operand (sub, y);
1950 if (sub != y)
1951 emit_insn (gen_move_insn (y, sub));
1952 seq = get_insns ();
1953 end_sequence ();
1956 #ifdef HAVE_cc0
1957 /* Don't separate setter from user. */
1958 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1959 insn = PREV_INSN (insn);
1960 #endif
1962 emit_insn_before (seq, insn);
1965 return;
1967 case MEM:
1968 if (var == x)
1970 /* If we already have a replacement, use it. Otherwise,
1971 try to fix up this address in case it is invalid. */
1973 replacement = find_fixup_replacement (replacements, var);
1974 if (replacement->new)
1976 *loc = replacement->new;
1977 return;
1980 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1982 /* Unless we are forcing memory to register or we changed the mode,
1983 we can leave things the way they are if the insn is valid. */
1985 INSN_CODE (insn) = -1;
1986 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1987 && recog_memoized (insn) >= 0)
1988 return;
1990 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1991 return;
1994 /* If X contains VAR, we need to unshare it here so that we update
1995 each occurrence separately. But all identical MEMs in one insn
1996 must be replaced with the same rtx because of the possibility of
1997 MATCH_DUPs. */
1999 if (reg_mentioned_p (var, x))
2001 replacement = find_fixup_replacement (replacements, x);
2002 if (replacement->new == 0)
2003 replacement->new = copy_most_rtx (x, no_share);
2005 *loc = x = replacement->new;
2006 code = GET_CODE (x);
2008 break;
2010 case REG:
2011 case CC0:
2012 case PC:
2013 case CONST_INT:
2014 case CONST:
2015 case SYMBOL_REF:
2016 case LABEL_REF:
2017 case CONST_DOUBLE:
2018 case CONST_VECTOR:
2019 return;
2021 case SIGN_EXTRACT:
2022 case ZERO_EXTRACT:
2023 /* Note that in some cases those types of expressions are altered
2024 by optimize_bit_field, and do not survive to get here. */
2025 if (XEXP (x, 0) == var
2026 || (GET_CODE (XEXP (x, 0)) == SUBREG
2027 && SUBREG_REG (XEXP (x, 0)) == var))
2029 /* Get TEM as a valid MEM in the mode presently in the insn.
2031 We don't worry about the possibility of MATCH_DUP here; it
2032 is highly unlikely and would be tricky to handle. */
2034 tem = XEXP (x, 0);
2035 if (GET_CODE (tem) == SUBREG)
2037 if (GET_MODE_BITSIZE (GET_MODE (tem))
2038 > GET_MODE_BITSIZE (GET_MODE (var)))
2040 replacement = find_fixup_replacement (replacements, var);
2041 if (replacement->new == 0)
2042 replacement->new = gen_reg_rtx (GET_MODE (var));
2043 SUBREG_REG (tem) = replacement->new;
2045 /* The following code works only if we have a MEM, so we
2046 need to handle the subreg here. We directly substitute
2047 it assuming that a subreg must be OK here. We already
2048 scheduled a replacement to copy the mem into the
2049 subreg. */
2050 XEXP (x, 0) = tem;
2051 return;
2053 else
2054 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2056 else
2057 tem = fixup_stack_1 (tem, insn);
2059 /* Unless we want to load from memory, get TEM into the proper mode
2060 for an extract from memory. This can only be done if the
2061 extract is at a constant position and length. */
2063 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2064 && GET_CODE (XEXP (x, 2)) == CONST_INT
2065 && ! mode_dependent_address_p (XEXP (tem, 0))
2066 && ! MEM_VOLATILE_P (tem))
2068 enum machine_mode wanted_mode = VOIDmode;
2069 enum machine_mode is_mode = GET_MODE (tem);
2070 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2072 if (GET_CODE (x) == ZERO_EXTRACT)
2074 enum machine_mode new_mode
2075 = mode_for_extraction (EP_extzv, 1);
2076 if (new_mode != MAX_MACHINE_MODE)
2077 wanted_mode = new_mode;
2079 else if (GET_CODE (x) == SIGN_EXTRACT)
2081 enum machine_mode new_mode
2082 = mode_for_extraction (EP_extv, 1);
2083 if (new_mode != MAX_MACHINE_MODE)
2084 wanted_mode = new_mode;
2087 /* If we have a narrower mode, we can do something. */
2088 if (wanted_mode != VOIDmode
2089 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2091 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2092 rtx old_pos = XEXP (x, 2);
2093 rtx newmem;
2095 /* If the bytes and bits are counted differently, we
2096 must adjust the offset. */
2097 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2098 offset = (GET_MODE_SIZE (is_mode)
2099 - GET_MODE_SIZE (wanted_mode) - offset);
2101 pos %= GET_MODE_BITSIZE (wanted_mode);
2103 newmem = adjust_address_nv (tem, wanted_mode, offset);
2105 /* Make the change and see if the insn remains valid. */
2106 INSN_CODE (insn) = -1;
2107 XEXP (x, 0) = newmem;
2108 XEXP (x, 2) = GEN_INT (pos);
2110 if (recog_memoized (insn) >= 0)
2111 return;
2113 /* Otherwise, restore old position. XEXP (x, 0) will be
2114 restored later. */
2115 XEXP (x, 2) = old_pos;
2119 /* If we get here, the bitfield extract insn can't accept a memory
2120 reference. Copy the input into a register. */
2122 tem1 = gen_reg_rtx (GET_MODE (tem));
2123 emit_insn_before (gen_move_insn (tem1, tem), insn);
2124 XEXP (x, 0) = tem1;
2125 return;
2127 break;
2129 case SUBREG:
2130 if (SUBREG_REG (x) == var)
2132 /* If this is a special SUBREG made because VAR was promoted
2133 from a wider mode, replace it with VAR and call ourself
2134 recursively, this time saying that the object previously
2135 had its current mode (by virtue of the SUBREG). */
2137 if (SUBREG_PROMOTED_VAR_P (x))
2139 *loc = var;
2140 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2141 no_share);
2142 return;
2145 /* If this SUBREG makes VAR wider, it has become a paradoxical
2146 SUBREG with VAR in memory, but these aren't allowed at this
2147 stage of the compilation. So load VAR into a pseudo and take
2148 a SUBREG of that pseudo. */
2149 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2151 replacement = find_fixup_replacement (replacements, var);
2152 if (replacement->new == 0)
2153 replacement->new = gen_reg_rtx (promoted_mode);
2154 SUBREG_REG (x) = replacement->new;
2155 return;
2158 /* See if we have already found a replacement for this SUBREG.
2159 If so, use it. Otherwise, make a MEM and see if the insn
2160 is recognized. If not, or if we should force MEM into a register,
2161 make a pseudo for this SUBREG. */
2162 replacement = find_fixup_replacement (replacements, x);
2163 if (replacement->new)
2165 *loc = replacement->new;
2166 return;
2169 replacement->new = *loc = fixup_memory_subreg (x, insn,
2170 promoted_mode, 0);
2172 INSN_CODE (insn) = -1;
2173 if (! flag_force_mem && recog_memoized (insn) >= 0)
2174 return;
2176 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2177 return;
2179 break;
2181 case SET:
2182 /* First do special simplification of bit-field references. */
2183 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2184 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2185 optimize_bit_field (x, insn, 0);
2186 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2187 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2188 optimize_bit_field (x, insn, 0);
2190 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2191 into a register and then store it back out. */
2192 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2193 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2194 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2195 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2196 > GET_MODE_SIZE (GET_MODE (var))))
2198 replacement = find_fixup_replacement (replacements, var);
2199 if (replacement->new == 0)
2200 replacement->new = gen_reg_rtx (GET_MODE (var));
2202 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2203 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2206 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2207 insn into a pseudo and store the low part of the pseudo into VAR. */
2208 if (GET_CODE (SET_DEST (x)) == SUBREG
2209 && SUBREG_REG (SET_DEST (x)) == var
2210 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2211 > GET_MODE_SIZE (GET_MODE (var))))
2213 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2214 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2215 tem)),
2216 insn);
2217 break;
2221 rtx dest = SET_DEST (x);
2222 rtx src = SET_SRC (x);
2223 rtx outerdest = dest;
2225 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2226 || GET_CODE (dest) == SIGN_EXTRACT
2227 || GET_CODE (dest) == ZERO_EXTRACT)
2228 dest = XEXP (dest, 0);
2230 if (GET_CODE (src) == SUBREG)
2231 src = SUBREG_REG (src);
2233 /* If VAR does not appear at the top level of the SET
2234 just scan the lower levels of the tree. */
2236 if (src != var && dest != var)
2237 break;
2239 /* We will need to rerecognize this insn. */
2240 INSN_CODE (insn) = -1;
2242 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2243 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2245 /* Since this case will return, ensure we fixup all the
2246 operands here. */
2247 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2248 insn, replacements, no_share);
2249 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2250 insn, replacements, no_share);
2251 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2252 insn, replacements, no_share);
2254 tem = XEXP (outerdest, 0);
2256 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2257 that may appear inside a ZERO_EXTRACT.
2258 This was legitimate when the MEM was a REG. */
2259 if (GET_CODE (tem) == SUBREG
2260 && SUBREG_REG (tem) == var)
2261 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2262 else
2263 tem = fixup_stack_1 (tem, insn);
2265 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2266 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2267 && ! mode_dependent_address_p (XEXP (tem, 0))
2268 && ! MEM_VOLATILE_P (tem))
2270 enum machine_mode wanted_mode;
2271 enum machine_mode is_mode = GET_MODE (tem);
2272 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2274 wanted_mode = mode_for_extraction (EP_insv, 0);
2276 /* If we have a narrower mode, we can do something. */
2277 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2279 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2280 rtx old_pos = XEXP (outerdest, 2);
2281 rtx newmem;
2283 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2284 offset = (GET_MODE_SIZE (is_mode)
2285 - GET_MODE_SIZE (wanted_mode) - offset);
2287 pos %= GET_MODE_BITSIZE (wanted_mode);
2289 newmem = adjust_address_nv (tem, wanted_mode, offset);
2291 /* Make the change and see if the insn remains valid. */
2292 INSN_CODE (insn) = -1;
2293 XEXP (outerdest, 0) = newmem;
2294 XEXP (outerdest, 2) = GEN_INT (pos);
2296 if (recog_memoized (insn) >= 0)
2297 return;
2299 /* Otherwise, restore old position. XEXP (x, 0) will be
2300 restored later. */
2301 XEXP (outerdest, 2) = old_pos;
2305 /* If we get here, the bit-field store doesn't allow memory
2306 or isn't located at a constant position. Load the value into
2307 a register, do the store, and put it back into memory. */
2309 tem1 = gen_reg_rtx (GET_MODE (tem));
2310 emit_insn_before (gen_move_insn (tem1, tem), insn);
2311 emit_insn_after (gen_move_insn (tem, tem1), insn);
2312 XEXP (outerdest, 0) = tem1;
2313 return;
2316 /* STRICT_LOW_PART is a no-op on memory references
2317 and it can cause combinations to be unrecognizable,
2318 so eliminate it. */
2320 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2321 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2323 /* A valid insn to copy VAR into or out of a register
2324 must be left alone, to avoid an infinite loop here.
2325 If the reference to VAR is by a subreg, fix that up,
2326 since SUBREG is not valid for a memref.
2327 Also fix up the address of the stack slot.
2329 Note that we must not try to recognize the insn until
2330 after we know that we have valid addresses and no
2331 (subreg (mem ...) ...) constructs, since these interfere
2332 with determining the validity of the insn. */
2334 if ((SET_SRC (x) == var
2335 || (GET_CODE (SET_SRC (x)) == SUBREG
2336 && SUBREG_REG (SET_SRC (x)) == var))
2337 && (GET_CODE (SET_DEST (x)) == REG
2338 || (GET_CODE (SET_DEST (x)) == SUBREG
2339 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2340 && GET_MODE (var) == promoted_mode
2341 && x == single_set (insn))
2343 rtx pat, last;
2345 if (GET_CODE (SET_SRC (x)) == SUBREG
2346 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2347 > GET_MODE_SIZE (GET_MODE (var))))
2349 /* This (subreg VAR) is now a paradoxical subreg. We need
2350 to replace VAR instead of the subreg. */
2351 replacement = find_fixup_replacement (replacements, var);
2352 if (replacement->new == NULL_RTX)
2353 replacement->new = gen_reg_rtx (GET_MODE (var));
2354 SUBREG_REG (SET_SRC (x)) = replacement->new;
2356 else
2358 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2359 if (replacement->new)
2360 SET_SRC (x) = replacement->new;
2361 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2362 SET_SRC (x) = replacement->new
2363 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2364 0);
2365 else
2366 SET_SRC (x) = replacement->new
2367 = fixup_stack_1 (SET_SRC (x), insn);
2370 if (recog_memoized (insn) >= 0)
2371 return;
2373 /* INSN is not valid, but we know that we want to
2374 copy SET_SRC (x) to SET_DEST (x) in some way. So
2375 we generate the move and see whether it requires more
2376 than one insn. If it does, we emit those insns and
2377 delete INSN. Otherwise, we can just replace the pattern
2378 of INSN; we have already verified above that INSN has
2379 no other function than to do X. */
2381 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2382 if (NEXT_INSN (pat) != NULL_RTX)
2384 last = emit_insn_before (pat, insn);
2386 /* INSN might have REG_RETVAL or other important notes, so
2387 we need to store the pattern of the last insn in the
2388 sequence into INSN similarly to the normal case. LAST
2389 should not have REG_NOTES, but we allow them if INSN has
2390 no REG_NOTES. */
2391 if (REG_NOTES (last) && REG_NOTES (insn))
2392 abort ();
2393 if (REG_NOTES (last))
2394 REG_NOTES (insn) = REG_NOTES (last);
2395 PATTERN (insn) = PATTERN (last);
2397 delete_insn (last);
2399 else
2400 PATTERN (insn) = PATTERN (pat);
2402 return;
2405 if ((SET_DEST (x) == var
2406 || (GET_CODE (SET_DEST (x)) == SUBREG
2407 && SUBREG_REG (SET_DEST (x)) == var))
2408 && (GET_CODE (SET_SRC (x)) == REG
2409 || (GET_CODE (SET_SRC (x)) == SUBREG
2410 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2411 && GET_MODE (var) == promoted_mode
2412 && x == single_set (insn))
2414 rtx pat, last;
2416 if (GET_CODE (SET_DEST (x)) == SUBREG)
2417 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2418 promoted_mode, 0);
2419 else
2420 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2422 if (recog_memoized (insn) >= 0)
2423 return;
2425 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2426 if (NEXT_INSN (pat) != NULL_RTX)
2428 last = emit_insn_before (pat, insn);
2430 /* INSN might have REG_RETVAL or other important notes, so
2431 we need to store the pattern of the last insn in the
2432 sequence into INSN similarly to the normal case. LAST
2433 should not have REG_NOTES, but we allow them if INSN has
2434 no REG_NOTES. */
2435 if (REG_NOTES (last) && REG_NOTES (insn))
2436 abort ();
2437 if (REG_NOTES (last))
2438 REG_NOTES (insn) = REG_NOTES (last);
2439 PATTERN (insn) = PATTERN (last);
2441 delete_insn (last);
2443 else
2444 PATTERN (insn) = PATTERN (pat);
2446 return;
2449 /* Otherwise, storing into VAR must be handled specially
2450 by storing into a temporary and copying that into VAR
2451 with a new insn after this one. Note that this case
2452 will be used when storing into a promoted scalar since
2453 the insn will now have different modes on the input
2454 and output and hence will be invalid (except for the case
2455 of setting it to a constant, which does not need any
2456 change if it is valid). We generate extra code in that case,
2457 but combine.c will eliminate it. */
2459 if (dest == var)
2461 rtx temp;
2462 rtx fixeddest = SET_DEST (x);
2463 enum machine_mode temp_mode;
2465 /* STRICT_LOW_PART around a MEM can be discarded. */
2466 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2467 fixeddest = XEXP (fixeddest, 0);
2468 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2469 if (GET_CODE (fixeddest) == SUBREG)
2471 fixeddest = fixup_memory_subreg (fixeddest, insn,
2472 promoted_mode, 0);
2473 temp_mode = GET_MODE (fixeddest);
2475 else
2477 fixeddest = fixup_stack_1 (fixeddest, insn);
2478 temp_mode = promoted_mode;
2481 temp = gen_reg_rtx (temp_mode);
2483 emit_insn_after (gen_move_insn (fixeddest,
2484 gen_lowpart (GET_MODE (fixeddest),
2485 temp)),
2486 insn);
2488 SET_DEST (x) = temp;
2492 default:
2493 break;
2496 /* Nothing special about this RTX; fix its operands. */
2498 fmt = GET_RTX_FORMAT (code);
2499 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2501 if (fmt[i] == 'e')
2502 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2503 no_share);
2504 else if (fmt[i] == 'E')
2506 int j;
2507 for (j = 0; j < XVECLEN (x, i); j++)
2508 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2509 insn, replacements, no_share);
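
/* Editorial note, not part of the original sources: the loop above is
   the generic rtl operand walk.  GET_RTX_FORMAT yields one format
   letter per operand of a code; 'e' marks a sub-expression and 'E' a
   vector of sub-expressions.  A minimal recursion over any rtx:  */
#if 0
static void
walk_rtx (x)
     rtx x;
{
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int i, j;

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      walk_rtx (XEXP (x, i));		/* A single nested rtx.  */
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	walk_rtx (XVECEXP (x, i, j));	/* A vector of rtxs.  */
}
#endif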
2514 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2515 The REG was placed on the stack, so X now has the form (SUBREG:m1
2516 (MEM:m2 ...)).
2518 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2519 must be emitted to compute NEWADDR, put them before INSN.
2521 UNCRITICAL nonzero means accept paradoxical subregs.
2522 This is used for subregs found inside REG_NOTES. */
2524 static rtx
2525 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2526 rtx x;
2527 rtx insn;
2528 enum machine_mode promoted_mode;
2529 int uncritical;
2531 int offset;
2532 rtx mem = SUBREG_REG (x);
2533 rtx addr = XEXP (mem, 0);
2534 enum machine_mode mode = GET_MODE (x);
2535 rtx result, seq;
2537 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2538 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2539 abort ();
2541 offset = SUBREG_BYTE (x);
2542 if (BYTES_BIG_ENDIAN)
2543 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2544 the offset so that it points to the right location within the
2545 MEM. */
2546 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2548 if (!flag_force_addr
2549 && memory_address_p (mode, plus_constant (addr, offset)))
2550 /* Shortcut if no insns need be emitted. */
2551 return adjust_address (mem, mode, offset);
2553 start_sequence ();
2554 result = adjust_address (mem, mode, offset);
2555 seq = get_insns ();
2556 end_sequence ();
2558 emit_insn_before (seq, insn);
2559 return result;
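
/* Editorial example, not part of the original sources: a typical
   rewrite performed by fixup_memory_subreg.  On a little-endian target
   where a DImode pseudo was forced into memory, the low-word reference

     (subreg:SI (mem:DI ADDR) 0)

   becomes the equivalent narrower memory reference

     (mem:SI ADDR)

   via adjust_address with SUBREG_BYTE as the offset; on a big-endian
   target the offset is first flipped, as the code above shows.  */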
2562 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2563 Replace subexpressions of X in place.
2564 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2565 Otherwise return X, with its contents possibly altered.
2567 INSN, PROMOTED_MODE and UNCRITICAL are as for
2568 fixup_memory_subreg. */
2570 static rtx
2571 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2572 rtx x;
2573 rtx insn;
2574 enum machine_mode promoted_mode;
2575 int uncritical;
2577 enum rtx_code code;
2578 const char *fmt;
2579 int i;
2581 if (x == 0)
2582 return 0;
2584 code = GET_CODE (x);
2586 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2587 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2589 /* Nothing special about this RTX; fix its operands. */
2591 fmt = GET_RTX_FORMAT (code);
2592 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2594 if (fmt[i] == 'e')
2595 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2596 promoted_mode, uncritical);
2597 else if (fmt[i] == 'E')
2599 int j;
2600 for (j = 0; j < XVECLEN (x, i); j++)
2601 XVECEXP (x, i, j)
2602 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2603 promoted_mode, uncritical);
2606 return x;
2609 /* For each memory ref within X, if it refers to a stack slot
2610 with an out of range displacement, put the address in a temp register
2611 (emitting new insns before INSN to load these registers)
2612 and alter the memory ref to use that register.
2613 Replace each such MEM rtx with a copy, to avoid clobberage. */
2615 static rtx
2616 fixup_stack_1 (x, insn)
2617 rtx x;
2618 rtx insn;
2620 int i;
2621 RTX_CODE code = GET_CODE (x);
2622 const char *fmt;
2624 if (code == MEM)
2626 rtx ad = XEXP (x, 0);
2627 /* If we have address of a stack slot but it's not valid
2628 (displacement is too large), compute the sum in a register. */
2629 if (GET_CODE (ad) == PLUS
2630 && GET_CODE (XEXP (ad, 0)) == REG
2631 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2632 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2633 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2634 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2635 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2636 #endif
2637 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2638 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2639 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2640 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2642 rtx temp, seq;
2643 if (memory_address_p (GET_MODE (x), ad))
2644 return x;
2646 start_sequence ();
2647 temp = copy_to_reg (ad);
2648 seq = get_insns ();
2649 end_sequence ();
2650 emit_insn_before (seq, insn);
2651 return replace_equiv_address (x, temp);
2653 return x;
2656 fmt = GET_RTX_FORMAT (code);
2657 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2659 if (fmt[i] == 'e')
2660 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2661 else if (fmt[i] == 'E')
2663 int j;
2664 for (j = 0; j < XVECLEN (x, i); j++)
2665 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2668 return x;
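
/* Editorial example, not part of the original sources: the rewrite
   fixup_stack_1 performs for an out-of-range stack displacement.  On a
   machine whose memory operands accept only small offsets,

     (mem:SI (plus (reg FRAME_POINTER) (const_int 100000)))

   is replaced by

     (mem:SI (reg TEMP))

   after emitting insns before INSN (via copy_to_reg) that compute
   TEMP = frame pointer + 100000.  */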
2671 /* Optimization: a bit-field instruction whose field
2672 happens to be a byte or halfword in memory
2673 can be changed to a move instruction.
2675 We call here when INSN is an insn to examine or store into a bit-field.
2676 BODY is the SET-rtx to be altered.
2678 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2679 (Currently this is called only from function.c, and EQUIV_MEM
2680 is always 0.) */
2682 static void
2683 optimize_bit_field (body, insn, equiv_mem)
2684 rtx body;
2685 rtx insn;
2686 rtx *equiv_mem;
2688 rtx bitfield;
2689 int destflag;
2690 rtx seq = 0;
2691 enum machine_mode mode;
2693 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2694 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2695 bitfield = SET_DEST (body), destflag = 1;
2696 else
2697 bitfield = SET_SRC (body), destflag = 0;
2699 /* First check that the field being stored has constant size and position
2700 and is in fact a byte or halfword suitably aligned. */
2702 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2703 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2704 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2705 != BLKmode)
2706 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2708 rtx memref = 0;
2710 /* Now check that the containing word is memory, not a register,
2711 and that it is safe to change the machine mode. */
2713 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2714 memref = XEXP (bitfield, 0);
2715 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2716 && equiv_mem != 0)
2717 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2718 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2719 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2720 memref = SUBREG_REG (XEXP (bitfield, 0));
2721 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2722 && equiv_mem != 0
2723 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2724 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2726 if (memref
2727 && ! mode_dependent_address_p (XEXP (memref, 0))
2728 && ! MEM_VOLATILE_P (memref))
2730 /* Now adjust the address, first for any subreg'ing
2731 that we are now getting rid of,
2732 and then for which byte of the word is wanted. */
2734 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2735 rtx insns;
2737 /* Adjust OFFSET to count bits from low-address byte. */
2738 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2739 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2740 - offset - INTVAL (XEXP (bitfield, 1)));
2742 /* Adjust OFFSET to count bytes from low-address byte. */
2743 offset /= BITS_PER_UNIT;
2744 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2746 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2747 / UNITS_PER_WORD) * UNITS_PER_WORD;
2748 if (BYTES_BIG_ENDIAN)
2749 offset -= (MIN (UNITS_PER_WORD,
2750 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2751 - MIN (UNITS_PER_WORD,
2752 GET_MODE_SIZE (GET_MODE (memref))));
2755 start_sequence ();
2756 memref = adjust_address (memref, mode, offset);
2757 insns = get_insns ();
2758 end_sequence ();
2759 emit_insn_before (insns, insn);
2761 /* Store this memory reference where
2762 we found the bit field reference. */
2764 if (destflag)
2766 validate_change (insn, &SET_DEST (body), memref, 1);
2767 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2769 rtx src = SET_SRC (body);
2770 while (GET_CODE (src) == SUBREG
2771 && SUBREG_BYTE (src) == 0)
2772 src = SUBREG_REG (src);
2773 if (GET_MODE (src) != GET_MODE (memref))
2774 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2775 validate_change (insn, &SET_SRC (body), src, 1);
2777 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2778 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2779 /* This shouldn't happen because anything that didn't have
2780 one of these modes should have got converted explicitly
2781 and then referenced through a subreg.
2782 This is so because the original bit-field was
2783 handled by agg_mode and so its tree structure had
2784 the same mode that memref now has. */
2785 abort ();
2787 else
2789 rtx dest = SET_DEST (body);
2791 while (GET_CODE (dest) == SUBREG
2792 && SUBREG_BYTE (dest) == 0
2793 && (GET_MODE_CLASS (GET_MODE (dest))
2794 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2795 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2796 <= UNITS_PER_WORD))
2797 dest = SUBREG_REG (dest);
2799 validate_change (insn, &SET_DEST (body), dest, 1);
2801 if (GET_MODE (dest) == GET_MODE (memref))
2802 validate_change (insn, &SET_SRC (body), memref, 1);
2803 else
2805 /* Convert the mem ref to the destination mode. */
2806 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2808 start_sequence ();
2809 convert_move (newreg, memref,
2810 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2811 seq = get_insns ();
2812 end_sequence ();
2814 validate_change (insn, &SET_SRC (body), newreg, 1);
2818 /* See if we can convert this extraction or insertion into
2819 a simple move insn. We might not be able to do so if this
2820 was, for example, part of a PARALLEL.
2822 If we succeed, write out any needed conversions. If we fail,
2823 it is hard to guess why we failed, so don't do anything
2824 special; just let the optimization be suppressed. */
2826 if (apply_change_group () && seq)
2827 emit_insn_before (seq, insn);
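
/* Editorial example, not part of the original sources: the
   optimization above in rtl terms, on a target where bytes and bits
   are counted the same way.  An aligned extraction such as

     (set (reg:SI R) (zero_extract:SI (mem:SI A) (const_int 8)
                                      (const_int 8)))

   has constant size 8 and a position that is a multiple of that size,
   so it is rewritten as a plain QImode load of the byte at A+1,
   zero-extended (via convert_move) into R.  */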
2832 /* These routines are responsible for converting virtual register references
2833 to the actual hard register references once RTL generation is complete.
2835 The following five variables are used for communication between the
2836 routines. They contain the offsets of the virtual registers from their
2837 respective hard registers. */
2839 static int in_arg_offset;
2840 static int var_offset;
2841 static int dynamic_offset;
2842 static int out_arg_offset;
2843 static int cfa_offset;
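
/* Editorial sketch, not part of the original sources: the mapping the
   offsets above drive once virtual registers are instantiated,
   assuming the usual frame layout:

     virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset
     virtual_cfa_rtx            -> arg_pointer_rtx   + cfa_offset  */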
2845 /* In most machines, the stack pointer register is equivalent to the bottom
2846 of the stack. */
2848 #ifndef STACK_POINTER_OFFSET
2849 #define STACK_POINTER_OFFSET 0
2850 #endif
2852 /* If not defined, pick an appropriate default for the offset of dynamically
2853 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2854 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2856 #ifndef STACK_DYNAMIC_OFFSET
2858 /* The bottom of the stack points to the actual arguments. If
2859 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2860 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2861 stack space for register parameters is not pushed by the caller, but
2862 rather part of the fixed stack areas and hence not included in
2863 `current_function_outgoing_args_size'. Nevertheless, we must allow
2864 for it when allocating stack dynamic objects. */
2866 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2867 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2868 ((ACCUMULATE_OUTGOING_ARGS \
2869 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2870 + (STACK_POINTER_OFFSET))
2872 #else
2873 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2874 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2875 + (STACK_POINTER_OFFSET))
2876 #endif
2877 #endif
2879 /* On most machines, the CFA coincides with the first incoming parm. */
2881 #ifndef ARG_POINTER_CFA_OFFSET
2882 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2883 #endif
2885 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
2886 had its address taken. DECL is the decl or SAVE_EXPR for the
2887 object stored in the register, for later use if we do need to force
2888 REG into the stack. REG is overwritten by the MEM like in
2889 put_reg_into_stack. RESCAN is true if previously emitted
2890 instructions must be rescanned and modified now that the REG has
2891 been transformed. */
2893 rtx
2894 gen_mem_addressof (reg, decl, rescan)
2895 rtx reg;
2896 tree decl;
2897 int rescan;
2899 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2900 REGNO (reg), decl);
2902 /* Calculate this before we start messing with decl's RTL. */
2903 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2905 /* If the original REG was a user-variable, then so is the REG whose
2906 address is being taken. Likewise for unchanging. */
2907 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2908 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2910 PUT_CODE (reg, MEM);
2911 MEM_ATTRS (reg) = 0;
2912 XEXP (reg, 0) = r;
2914 if (decl)
2916 tree type = TREE_TYPE (decl);
2917 enum machine_mode decl_mode
2918 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2919 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2920 : DECL_RTL_IF_SET (decl));
2922 PUT_MODE (reg, decl_mode);
2924 /* Clear DECL_RTL momentarily so functions below will work
2925 properly, then set it again. */
2926 if (DECL_P (decl) && decl_rtl == reg)
2927 SET_DECL_RTL (decl, 0);
2929 set_mem_attributes (reg, decl, 1);
2930 set_mem_alias_set (reg, set);
2932 if (DECL_P (decl) && decl_rtl == reg)
2933 SET_DECL_RTL (decl, reg);
2935 if (rescan
2936 && (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0)))
2937 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2939 else if (rescan)
2940 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2942 return reg;
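
/* Editorial example, not part of the original sources: the shape
   gen_mem_addressof produces.  On a 32-bit target where Pmode is
   SImode, taking the address of pseudo 60 holding an int variable
   turns the shared (reg:SI 60) rtx itself into

     (mem:SI (addressof:SI (reg:SI 61) 60 <decl>))

   where (reg:SI 61) is the fresh pseudo allocated above.  If the
   ADDRESSOF survives to purge_addressof, the register is finally
   forced into a real stack slot.  */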
2945 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2947 void
2948 flush_addressof (decl)
2949 tree decl;
2951 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2952 && DECL_RTL (decl) != 0
2953 && GET_CODE (DECL_RTL (decl)) == MEM
2954 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2955 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2956 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2959 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2961 static void
2962 put_addressof_into_stack (r, ht)
2963 rtx r;
2964 htab_t ht;
2966 tree decl, type;
2967 int volatile_p, used_p;
2969 rtx reg = XEXP (r, 0);
2971 if (GET_CODE (reg) != REG)
2972 abort ();
2974 decl = ADDRESSOF_DECL (r);
2975 if (decl)
2977 type = TREE_TYPE (decl);
2978 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2979 && TREE_THIS_VOLATILE (decl));
2980 used_p = (TREE_USED (decl)
2981 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2983 else
2985 type = NULL_TREE;
2986 volatile_p = 0;
2987 used_p = 1;
2990 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2991 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2994 /* List of replacements made below in purge_addressof_1 when creating
2995 bitfield insertions. */
2996 static rtx purge_bitfield_addressof_replacements;
2998 /* List of replacements made below in purge_addressof_1 for patterns
2999 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3000 corresponding (ADDRESSOF (REG ...)) and the value is a substitution
3001 for the whole pattern. PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS alone is
3002 not enough in complex cases, e.g. when some field values can be
3003 extracted by using a MEM with a narrower mode. */
3004 static rtx purge_addressof_replacements;
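
/* Editorial sketch, not part of the original sources: the layout of
   the two lists above.  Each is a chain of EXPR_LIST nodes used as an
   association list,

     (expr_list KEY1 (expr_list VALUE1
       (expr_list KEY2 (expr_list VALUE2 (nil)))))

   so XEXP (tem, 0) is a key, XEXP (XEXP (tem, 1), 0) its replacement,
   and XEXP (XEXP (tem, 1), 1) the next pair -- which is exactly how
   the lookup loops in purge_addressof_1 below walk them.  */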
3006 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3007 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3008 the stack. If the function returns FALSE then the replacement could not
3009 be made. If MAY_POSTPONE is true and we would not put the ADDRESSOF
3010 into the stack, postpone processing of the insn. */
3012 static bool
3013 purge_addressof_1 (loc, insn, force, store, may_postpone, ht)
3014 rtx *loc;
3015 rtx insn;
3016 int force, store, may_postpone;
3017 htab_t ht;
3019 rtx x;
3020 RTX_CODE code;
3021 int i, j;
3022 const char *fmt;
3023 bool result = true;
3025 /* Re-start here to avoid recursion in common cases. */
3026 restart:
3028 x = *loc;
3029 if (x == 0)
3030 return true;
3032 code = GET_CODE (x);
3034 /* If we don't return in any of the cases below, we will recurse inside
3035 the RTX, which will normally result in any ADDRESSOF being forced into
3036 memory. */
3037 if (code == SET)
3039 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1,
3040 may_postpone, ht);
3041 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0,
3042 may_postpone, ht);
3043 return result;
3045 else if (code == ADDRESSOF)
3047 rtx sub, insns;
3049 if (GET_CODE (XEXP (x, 0)) != MEM)
3050 put_addressof_into_stack (x, ht);
3052 /* We must create a copy of the rtx because it was created by
3053 overwriting a REG rtx which is always shared. */
3054 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3055 if (validate_change (insn, loc, sub, 0)
3056 || validate_replace_rtx (x, sub, insn))
3057 return true;
3059 start_sequence ();
3061 /* If SUB is a hard or virtual register, try it as a pseudo-register.
3062 Otherwise, perhaps SUB is an expression, so generate code to compute
3063 it. */
3064 if (GET_CODE (sub) == REG && REGNO (sub) <= LAST_VIRTUAL_REGISTER)
3065 sub = copy_to_reg (sub);
3066 else
3067 sub = force_operand (sub, NULL_RTX);
3069 if (! validate_change (insn, loc, sub, 0)
3070 && ! validate_replace_rtx (x, sub, insn))
3071 abort ();
3073 insns = get_insns ();
3074 end_sequence ();
3075 emit_insn_before (insns, insn);
3076 return true;
3079 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3081 rtx sub = XEXP (XEXP (x, 0), 0);
3083 if (GET_CODE (sub) == MEM)
3084 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3085 else if (GET_CODE (sub) == REG
3086 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3088 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3090 int size_x, size_sub;
3092 if (may_postpone)
3094 /* Postpone for now, so that we do not emit bitfield arithmetic
3095 unless there is some benefit from it. */
3096 if (!postponed_insns || XEXP (postponed_insns, 0) != insn)
3097 postponed_insns = alloc_INSN_LIST (insn, postponed_insns);
3098 return true;
3101 if (!insn)
3103 /* When processing REG_NOTES look at the list of
3104 replacements done on the insn to find the register that X
3105 was replaced by. */
3106 rtx tem;
3108 for (tem = purge_bitfield_addressof_replacements;
3109 tem != NULL_RTX;
3110 tem = XEXP (XEXP (tem, 1), 1))
3111 if (rtx_equal_p (x, XEXP (tem, 0)))
3113 *loc = XEXP (XEXP (tem, 1), 0);
3114 return true;
3117 /* See comment for purge_addressof_replacements. */
3118 for (tem = purge_addressof_replacements;
3119 tem != NULL_RTX;
3120 tem = XEXP (XEXP (tem, 1), 1))
3121 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3123 rtx z = XEXP (XEXP (tem, 1), 0);
3125 if (GET_MODE (x) == GET_MODE (z)
3126 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3127 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3128 abort ();
3130 /* It can happen that the note may speak of things
3131 in a wider (or just different) mode than the
3132 code did. This is especially true of
3133 REG_RETVAL. */
3135 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3136 z = SUBREG_REG (z);
3138 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3139 && (GET_MODE_SIZE (GET_MODE (x))
3140 > GET_MODE_SIZE (GET_MODE (z))))
3142 /* This can occur as a result of invalid
3143 pointer casts, e.g. float f; ...
3144 *(long long int *)&f.
3145 ??? We could emit a warning here, but
3146 without a line number that wouldn't be
3147 very helpful. */
3148 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3150 else
3151 z = gen_lowpart (GET_MODE (x), z);
3153 *loc = z;
3154 return true;
3157 /* When we are processing the REG_NOTES of the last instruction
3158 of a libcall, there will typically be no replacements
3159 for that insn; the replacements happened before, in piecemeal
3160 fashion. OTOH we are not interested in the details of
3161 this for the REG_EQUAL note, we want to know the big picture,
3162 which can be succinctly described with a simple SUBREG.
3163 Note that removing the REG_EQUAL note is not an option
3164 on the last insn of a libcall, so we must do a replacement. */
3165 if (! purge_addressof_replacements
3166 && ! purge_bitfield_addressof_replacements)
3168 /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
3169 we got
3170 (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
3171 [0 S8 A32]), which can be expressed with a simple
3172 same-size subreg */
3173 if ((GET_MODE_SIZE (GET_MODE (x))
3174 == GET_MODE_SIZE (GET_MODE (sub)))
3175 /* Again, invalid pointer casts (as in
3176 compile/990203-1.c) can require paradoxical
3177 subregs. */
3178 || (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3179 && (GET_MODE_SIZE (GET_MODE (x))
3180 > GET_MODE_SIZE (GET_MODE (sub)))))
3182 *loc = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
3183 return true;
3185 /* ??? Are there other cases we should handle? */
3187 /* Sometimes we may not be able to find the replacement. For
3188 example when the original insn was a MEM in a wider mode,
3189 and the note is part of a sign extension of a narrowed
3190 version of that MEM. Gcc testcase compile/990829-1.c can
3191 generate an example of this situation. Rather than complain
3192 we return false, which will prompt our caller to remove the
3193 offending note. */
3194 return false;
3197 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3198 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3200 /* Do not frob unchanging MEMs. If a later reference forces the
3201 pseudo to the stack, we can wind up with multiple writes to
3202 an unchanging memory, which is invalid. */
3203 if (RTX_UNCHANGING_P (x) && size_x != size_sub)
3206 /* Don't even consider working with paradoxical subregs,
3207 or the moral equivalent seen here. */
3208 else if (size_x <= size_sub
3209 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3211 /* Do a bitfield insertion to mirror what would happen
3212 in memory. */
3214 rtx val, seq;
3216 if (store)
3218 rtx p = PREV_INSN (insn);
3220 start_sequence ();
3221 val = gen_reg_rtx (GET_MODE (x));
3222 if (! validate_change (insn, loc, val, 0))
3224 /* Discard the current sequence and put the
3225 ADDRESSOF on stack. */
3226 end_sequence ();
3227 goto give_up;
3229 seq = get_insns ();
3230 end_sequence ();
3231 emit_insn_before (seq, insn);
3232 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3233 insn, ht);
3235 start_sequence ();
3236 store_bit_field (sub, size_x, 0, GET_MODE (x),
3237 val, GET_MODE_SIZE (GET_MODE (sub)));
3239 /* Make sure to unshare any shared rtl that store_bit_field
3240 might have created. */
3241 unshare_all_rtl_again (get_insns ());
3243 seq = get_insns ();
3244 end_sequence ();
3245 p = emit_insn_after (seq, insn);
3246 if (NEXT_INSN (insn))
3247 compute_insns_for_mem (NEXT_INSN (insn),
3248 p ? NEXT_INSN (p) : NULL_RTX,
3249 ht);
3251 else
3253 rtx p = PREV_INSN (insn);
3255 start_sequence ();
3256 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3257 GET_MODE (x), GET_MODE (x),
3258 GET_MODE_SIZE (GET_MODE (sub)));
3260 if (! validate_change (insn, loc, val, 0))
3262 /* Discard the current sequence and put the
3263 ADDRESSOF on stack. */
3264 end_sequence ();
3265 goto give_up;
3268 seq = get_insns ();
3269 end_sequence ();
3270 emit_insn_before (seq, insn);
3271 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3272 insn, ht);
3275 /* Remember the replacement so that the same one can be done
3276 on the REG_NOTES. */
3277 purge_bitfield_addressof_replacements
3278 = gen_rtx_EXPR_LIST (VOIDmode, x,
3279 gen_rtx_EXPR_LIST
3280 (VOIDmode, val,
3281 purge_bitfield_addressof_replacements));
3283 /* We replaced with a reg -- all done. */
3284 return true;
3288 else if (validate_change (insn, loc, sub, 0))
3290 /* Remember the replacement so that the same one can be done
3291 on the REG_NOTES. */
3292 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3294 rtx tem;
3296 for (tem = purge_addressof_replacements;
3297 tem != NULL_RTX;
3298 tem = XEXP (XEXP (tem, 1), 1))
3299 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3301 XEXP (XEXP (tem, 1), 0) = sub;
3302 return true;
3304 purge_addressof_replacements
3305 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3306 gen_rtx_EXPR_LIST (VOIDmode, sub,
3307 purge_addressof_replacements));
3308 return true;
3310 goto restart;
3314 give_up:
3315 /* Scan all subexpressions. */
3316 fmt = GET_RTX_FORMAT (code);
3317 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3319 if (*fmt == 'e')
3320 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0,
3321 may_postpone, ht);
3322 else if (*fmt == 'E')
3323 for (j = 0; j < XVECLEN (x, i); j++)
3324 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0,
3325 may_postpone, ht);
3328 return result;
3331 /* Return a hash value for K, a REG. */
3333 static hashval_t
3334 insns_for_mem_hash (k)
3335 const void * k;
3337 /* Use the address of the key for the hash value. */
3338 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3339 return htab_hash_pointer (m->key);
3342 /* Return nonzero if K1 and K2 (two REGs) are the same. */
3344 static int
3345 insns_for_mem_comp (k1, k2)
3346 const void * k1;
3347 const void * k2;
3349 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3350 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3351 return m1->key == m2->key;
3354 struct insns_for_mem_walk_info
3356 /* The hash table that we are using to record which INSNs use which
3357 MEMs. */
3358 htab_t ht;
3360 /* The INSN we are currently processing. */
3361 rtx insn;
3363 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3364 to find the insns that use the REGs in the ADDRESSOFs. */
3365 int pass;
3368 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3369 that might be used in an ADDRESSOF expression, record this INSN in
3370 the hash table given by DATA (which is really a pointer to an
3371 insns_for_mem_walk_info structure). */
3373 static int
3374 insns_for_mem_walk (r, data)
3375 rtx *r;
3376 void *data;
3378 struct insns_for_mem_walk_info *ifmwi
3379 = (struct insns_for_mem_walk_info *) data;
3380 struct insns_for_mem_entry tmp;
3381 tmp.insns = NULL_RTX;
3383 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3384 && GET_CODE (XEXP (*r, 0)) == REG)
3386 PTR *e;
3387 tmp.key = XEXP (*r, 0);
3388 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3389 if (*e == NULL)
3391 *e = ggc_alloc (sizeof (tmp));
3392 memcpy (*e, &tmp, sizeof (tmp));
3395 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3397 struct insns_for_mem_entry *ifme;
3398 tmp.key = *r;
3399 ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3401 /* If we have not already recorded this INSN, do so now. Since
3402 we process the INSNs in order, we know that if we have
3403 recorded it, it must be at the front of the list. */
3404 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3405 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3406 ifme->insns);
3409 return 0;
3412 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3413 which REGs in HT. */
3415 static void
3416 compute_insns_for_mem (insns, last_insn, ht)
3417 rtx insns;
3418 rtx last_insn;
3419 htab_t ht;
3421 rtx insn;
3422 struct insns_for_mem_walk_info ifmwi;
3423 ifmwi.ht = ht;
3425 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3426 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3427 if (INSN_P (insn))
3429 ifmwi.insn = insn;
3430 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
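
/* Editorial sketch, not part of the original sources: how this table
   is meant to be driven; the flow mirrors purge_addressof below.  */
#if 0
htab_t ht = htab_create_ggc (1000, insns_for_mem_hash,
			     insns_for_mem_comp, NULL);

/* Pass 0 creates an entry for every REG inside an ADDRESSOF; pass 1
   then records, per entry, the insns mentioning that REG.  */
compute_insns_for_mem (get_insns (), NULL_RTX, ht);

/* Afterwards fixup_var_refs_insns_with_hash need only visit the insns
   that actually mention a given register.  */
#endif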
3434 /* Helper function for purge_addressof called through for_each_rtx.
3435 Returns true iff the rtl is an ADDRESSOF. */
3437 static int
3438 is_addressof (rtl, data)
3439 rtx *rtl;
3440 void *data ATTRIBUTE_UNUSED;
3442 return GET_CODE (*rtl) == ADDRESSOF;
3445 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3446 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3447 stack. */
3449 void
3450 purge_addressof (insns)
3451 rtx insns;
3453 rtx insn, tmp;
3454 htab_t ht;
3456 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3457 requires a fixup pass over the instruction stream to correct
3458 INSNs that depended on the REG being a REG, and not a MEM. But,
3459 these fixup passes are slow. Furthermore, most MEMs are not
3460 mentioned in very many instructions. So, we speed up the process
3461 by pre-calculating which REGs occur in which INSNs; that allows
3462 us to perform the fixup passes much more quickly. */
3463 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3464 compute_insns_for_mem (insns, NULL_RTX, ht);
3466 postponed_insns = NULL;
3468 for (insn = insns; insn; insn = NEXT_INSN (insn))
3469 if (INSN_P (insn))
3471 if (! purge_addressof_1 (&PATTERN (insn), insn,
3472 asm_noperands (PATTERN (insn)) > 0, 0, 1, ht))
3473 /* If we could not replace the ADDRESSOFs in the insn,
3474 something is wrong. */
3475 abort ();
3477 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, 0, ht))
3479 /* If we could not replace the ADDRESSOFs in the insn's notes,
3480 we can just remove the offending notes instead. */
3481 rtx note;
3483 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3485 /* If we find a REG_RETVAL note then the insn is a libcall.
3486 Such insns must have REG_EQUAL notes as well, in order
3487 for later passes of the compiler to work. So it is not
3488 safe to delete the notes here, and instead we abort. */
3489 if (REG_NOTE_KIND (note) == REG_RETVAL)
3490 abort ();
3491 if (for_each_rtx (&note, is_addressof, NULL))
3492 remove_note (insn, note);
3497 /* Process the postponed insns. */
3498 while (postponed_insns)
3500 insn = XEXP (postponed_insns, 0);
3501 tmp = postponed_insns;
3502 postponed_insns = XEXP (postponed_insns, 1);
3503 free_INSN_LIST_node (tmp);
3505 if (! purge_addressof_1 (&PATTERN (insn), insn,
3506 asm_noperands (PATTERN (insn)) > 0, 0, 0, ht))
3507 abort ();
3510 /* Clean up. */
3511 purge_bitfield_addressof_replacements = 0;
3512 purge_addressof_replacements = 0;
3514 /* REGs are shared. purge_addressof will destructively replace a REG
3515 with a MEM, which creates shared MEMs.
3517 Unfortunately, the children of put_reg_into_stack assume that MEMs
3518 referring to the same stack slot are shared (fixup_var_refs and
3519 the associated hash table code).
3521 So, we have to do another unsharing pass after we have flushed any
3522 REGs that had their address taken into the stack.
3524 It may be worth tracking whether or not we converted any REGs into
3525 MEMs to avoid this overhead when it is not needed. */
3526 unshare_all_rtl_again (get_insns ());
3529 /* Convert a SET of a hard subreg to a set of the appropriate hard
3530 register. A subroutine of purge_hard_subreg_sets. */
3532 static void
3533 purge_single_hard_subreg_set (pattern)
3534 rtx pattern;
3536 rtx reg = SET_DEST (pattern);
3537 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3538 int offset = 0;
3540 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3541 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3543 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3544 GET_MODE (SUBREG_REG (reg)),
3545 SUBREG_BYTE (reg),
3546 GET_MODE (reg));
3547 reg = SUBREG_REG (reg);
3551 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3553 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3554 SET_DEST (pattern) = reg;
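
/* Editorial example, not part of the original sources: the rewrite
   performed above.  On a 32-bit little-endian target where DImode
   occupies hard registers 0 and 1, a leftover set of part of a return
   value such as

     (set (subreg:SI (reg:DI 0) 4) (reg:SI 70))

   becomes a set of the hard register actually addressed,

     (set (reg:SI 1) (reg:SI 70))

   with subreg_regno_offset supplying the +1.  */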
3558 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3559 only such SETs that we expect to see are those left in because
3560 integrate can't handle sets of parts of a return value register.
3562 We don't use alter_subreg because we only want to eliminate subregs
3563 of hard registers. */
3565 void
3566 purge_hard_subreg_sets (insn)
3567 rtx insn;
3569 for (; insn; insn = NEXT_INSN (insn))
3571 if (INSN_P (insn))
3573 rtx pattern = PATTERN (insn);
3574 switch (GET_CODE (pattern))
3576 case SET:
3577 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3578 purge_single_hard_subreg_set (pattern);
3579 break;
3580 case PARALLEL:
3582 int j;
3583 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3585 rtx inner_pattern = XVECEXP (pattern, 0, j);
3586 if (GET_CODE (inner_pattern) == SET
3587 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3588 purge_single_hard_subreg_set (inner_pattern);
3591 break;
3592 default:
3593 break;
3599 /* Pass through the INSNS of function FNDECL and convert virtual register
3600 references to hard register references. */
3602 void
3603 instantiate_virtual_regs (fndecl, insns)
3604 tree fndecl;
3605 rtx insns;
3607 rtx insn;
3608 unsigned int i;
3610 /* Compute the offsets to use for this function. */
3611 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3612 var_offset = STARTING_FRAME_OFFSET;
3613 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3614 out_arg_offset = STACK_POINTER_OFFSET;
3615 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3617 /* Scan all variables and parameters of this function. For each that is
3618 in memory, instantiate all virtual registers if the result is a valid
3619 address. If not, we do it later. That will handle most uses of virtual
3620 regs on many machines. */
3621 instantiate_decls (fndecl, 1);
3623 /* Initialize recognition, indicating that volatile is OK. */
3624 init_recog ();
3626 /* Scan through all the insns, instantiating every virtual register still
3627 present. */
3628 for (insn = insns; insn; insn = NEXT_INSN (insn))
3629 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3630 || GET_CODE (insn) == CALL_INSN)
3632 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3633 if (INSN_DELETED_P (insn))
3634 continue;
3635 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3636 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3637 if (GET_CODE (insn) == CALL_INSN)
3638 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3639 NULL_RTX, 0);
3641 /* Past this point all ASM statements should match. Verify that
3642 to avoid failures later in the compilation process. */
3643 if (asm_noperands (PATTERN (insn)) >= 0
3644 && ! check_asm_operands (PATTERN (insn)))
3645 instantiate_virtual_regs_lossage (insn);
3648 /* Instantiate the stack slots for the parm registers, for later use in
3649 addressof elimination. */
3650 for (i = 0; i < max_parm_reg; ++i)
3651 if (parm_reg_stack_loc[i])
3652 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3654 /* Now instantiate the remaining register equivalences for debugging info.
3655 These will not be valid addresses. */
3656 instantiate_decls (fndecl, 0);
3658 /* Indicate that, from now on, assign_stack_local should use
3659 frame_pointer_rtx. */
3660 virtuals_instantiated = 1;
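/* An illustrative sketch of the rewrite this pass performs (the offsets
   are machine-dependent; assume STARTING_FRAME_OFFSET is -16 here):

     (set (reg:SI 100) (mem:SI (plus:SI (reg:SI virtual-stack-vars)
                                        (const_int 8))))

   becomes

     (set (reg:SI 100) (mem:SI (plus:SI (reg:SI frame-pointer)
                                        (const_int -8))))

   with no extra insns, since the virtual register's offset folds into
   the existing constant term.  */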
3663 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3664 all virtual registers in their DECL_RTL's.
3666 If VALID_ONLY, do this only if the resulting address is still valid.
3667 Otherwise, always do it. */
3669 static void
3670 instantiate_decls (fndecl, valid_only)
3671 tree fndecl;
3672 int valid_only;
3674 tree decl;
3676 /* Process all parameters of the function. */
3677 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3679 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3680 HOST_WIDE_INT size_rtl;
3682 instantiate_decl (DECL_RTL (decl), size, valid_only);
3684 /* If the parameter was promoted, the size of the incoming RTL's
3685 mode may be larger than the size of the declared type. We must
3686 use the larger of the two sizes. */
3687 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3688 size = MAX (size_rtl, size);
3689 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3692 /* Now process all variables defined in the function or its subblocks. */
3693 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3696 /* Subroutine of instantiate_decls: Process all decls in the given
3697 BLOCK node and all its subblocks. */
3699 static void
3700 instantiate_decls_1 (let, valid_only)
3701 tree let;
3702 int valid_only;
3704 tree t;
3706 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3707 if (DECL_RTL_SET_P (t))
3708 instantiate_decl (DECL_RTL (t),
3709 int_size_in_bytes (TREE_TYPE (t)),
3710 valid_only);
3712 /* Process all subblocks. */
3713 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3714 instantiate_decls_1 (t, valid_only);
3717 /* Subroutine of the preceding procedures: Given RTL representing a
3718 decl and the size of the object, do any instantiation required.
3720 If VALID_ONLY is nonzero, it means that the RTL should only be
3721 changed if the new address is valid. */
3723 static void
3724 instantiate_decl (x, size, valid_only)
3725 rtx x;
3726 HOST_WIDE_INT size;
3727 int valid_only;
3729 enum machine_mode mode;
3730 rtx addr;
3732 /* If this is not a MEM, no need to do anything. Similarly if the
3733 address is a constant or a register that is not a virtual register. */
3735 if (x == 0 || GET_CODE (x) != MEM)
3736 return;
3738 addr = XEXP (x, 0);
3739 if (CONSTANT_P (addr)
3740 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3741 || (GET_CODE (addr) == REG
3742 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3743 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3744 return;
3746 /* If we should only do this if the address is valid, copy the address.
3747 We need to do this so we can undo any changes that might make the
3748 address invalid. This copy is unfortunate, but probably can't be
3749 avoided. */
3751 if (valid_only)
3752 addr = copy_rtx (addr);
3754 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3756 if (valid_only && size >= 0)
3758 unsigned HOST_WIDE_INT decl_size = size;
3760 /* Now verify that the resulting address is valid for every integer or
3761 floating-point mode up to and including SIZE bytes long. We do this
3762 since the object might be accessed in any mode and frame addresses
3763 are shared. */
3765 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3766 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3767 mode = GET_MODE_WIDER_MODE (mode))
3768 if (! memory_address_p (mode, addr))
3769 return;
3771 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3772 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3773 mode = GET_MODE_WIDER_MODE (mode))
3774 if (! memory_address_p (mode, addr))
3775 return;
3778 /* Put back the address now that we have updated it and we either know
3779 it is valid or we don't care whether it is valid. */
3781 XEXP (x, 0) = addr;
3784 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3785 is a virtual register, return the equivalent hard register and set the
3786 offset indirectly through the pointer. Otherwise, return 0. */
3788 static rtx
3789 instantiate_new_reg (x, poffset)
3790 rtx x;
3791 HOST_WIDE_INT *poffset;
3793 rtx new;
3794 HOST_WIDE_INT offset;
3796 if (x == virtual_incoming_args_rtx)
3797 new = arg_pointer_rtx, offset = in_arg_offset;
3798 else if (x == virtual_stack_vars_rtx)
3799 new = frame_pointer_rtx, offset = var_offset;
3800 else if (x == virtual_stack_dynamic_rtx)
3801 new = stack_pointer_rtx, offset = dynamic_offset;
3802 else if (x == virtual_outgoing_args_rtx)
3803 new = stack_pointer_rtx, offset = out_arg_offset;
3804 else if (x == virtual_cfa_rtx)
3805 new = arg_pointer_rtx, offset = cfa_offset;
3806 else
3807 return 0;
3809 *poffset = offset;
3810 return new;
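/* Each virtual register thus has a fixed replacement: for instance,
   virtual-incoming-args becomes the arg pointer plus FIRST_PARM_OFFSET
   and virtual-stack-vars becomes the frame pointer plus
   STARTING_FRAME_OFFSET, using the offsets computed at the start of
   instantiate_virtual_regs.  */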
3814 /* Called when instantiate_virtual_regs has failed to update the instruction.
3815 Usually this means that a non-matching instruction has been emitted; for
3816 asm statements, however, the problem may lie in the constraints. */
3817 static void
3818 instantiate_virtual_regs_lossage (insn)
3819 rtx insn;
3821 if (asm_noperands (PATTERN (insn)) >= 0)
3823 error_for_asm (insn, "impossible constraint in `asm'");
3824 delete_insn (insn);
3826 else
3827 abort ();
3829 /* Given a pointer to a piece of rtx and an optional pointer to the
3830 containing object, instantiate any virtual registers present in it.
3832 If EXTRA_INSNS, we always do the replacement and generate
3833 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3834 is not valid.
3836 Return 1 if we either had nothing to do or if we were able to do the
3837 needed replacement. Return 0 otherwise; we only return zero if
3838 EXTRA_INSNS is zero.
3840 We first try some simple transformations to avoid the creation of extra
3841 pseudos. */
3843 static int
3844 instantiate_virtual_regs_1 (loc, object, extra_insns)
3845 rtx *loc;
3846 rtx object;
3847 int extra_insns;
3849 rtx x;
3850 RTX_CODE code;
3851 rtx new = 0;
3852 HOST_WIDE_INT offset = 0;
3853 rtx temp;
3854 rtx seq;
3855 int i, j;
3856 const char *fmt;
3858 /* Re-start here to avoid recursion in common cases. */
3859 restart:
3861 x = *loc;
3862 if (x == 0)
3863 return 1;
3865 /* We may have detected and deleted invalid asm statements. */
3866 if (object && INSN_P (object) && INSN_DELETED_P (object))
3867 return 1;
3869 code = GET_CODE (x);
3871 /* Check for some special cases. */
3872 switch (code)
3874 case CONST_INT:
3875 case CONST_DOUBLE:
3876 case CONST_VECTOR:
3877 case CONST:
3878 case SYMBOL_REF:
3879 case CODE_LABEL:
3880 case PC:
3881 case CC0:
3882 case ASM_INPUT:
3883 case ADDR_VEC:
3884 case ADDR_DIFF_VEC:
3885 case RETURN:
3886 return 1;
3888 case SET:
3889 /* We are allowed to set the virtual registers. This means that
3890 the actual register should receive the source minus the
3891 appropriate offset. This is used, for example, in the handling
3892 of non-local gotos. */
3893 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3895 rtx src = SET_SRC (x);
3897 /* We are setting the register, not using it, so the relevant
3898 offset is the negative of the offset to use were we using
3899 the register. */
3900 offset = - offset;
3901 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3903 /* The only valid sources here are PLUS or REG. Just do
3904 the simplest possible thing to handle them. */
3905 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3907 instantiate_virtual_regs_lossage (object);
3908 return 1;
3911 start_sequence ();
3912 if (GET_CODE (src) != REG)
3913 temp = force_operand (src, NULL_RTX);
3914 else
3915 temp = src;
3916 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3917 seq = get_insns ();
3918 end_sequence ();
3920 emit_insn_before (seq, object);
3921 SET_DEST (x) = new;
3923 if (! validate_change (object, &SET_SRC (x), temp, 0)
3924 || ! extra_insns)
3925 instantiate_virtual_regs_lossage (object);
3927 return 1;
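/* For example, a nonlocal-goto receiver may contain

     (set (reg virtual-stack-vars) (reg:SI 99))

   with a hypothetical source register 99.  If virtual-stack-vars maps
   to the frame pointer at offset OFF, the code above rewrites this so
   that the frame pointer receives register 99 minus OFF, via the
   force_operand sequence emitted before the insn.  */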
3930 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3931 loc = &SET_SRC (x);
3932 goto restart;
3934 case PLUS:
3935 /* Handle special case of virtual register plus constant. */
3936 if (CONSTANT_P (XEXP (x, 1)))
3938 rtx old, new_offset;
3940 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3941 if (GET_CODE (XEXP (x, 0)) == PLUS)
3943 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3945 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3946 extra_insns);
3947 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3949 else
3951 loc = &XEXP (x, 0);
3952 goto restart;
3956 #ifdef POINTERS_EXTEND_UNSIGNED
3957 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3958 we can commute the PLUS and SUBREG because pointers into the
3959 frame are well-behaved. */
3960 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3961 && GET_CODE (XEXP (x, 1)) == CONST_INT
3962 && 0 != (new
3963 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3964 &offset))
3965 && validate_change (object, loc,
3966 plus_constant (gen_lowpart (ptr_mode,
3967 new),
3968 offset
3969 + INTVAL (XEXP (x, 1))),
3970 0))
3971 return 1;
3972 #endif
3973 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3975 /* We know the second operand is a constant. Unless the
3976 first operand is a REG (which has already been checked),
3977 it needs to be checked. */
3978 if (GET_CODE (XEXP (x, 0)) != REG)
3980 loc = &XEXP (x, 0);
3981 goto restart;
3983 return 1;
3986 new_offset = plus_constant (XEXP (x, 1), offset);
3988 /* If the new constant is zero, try to replace the sum with just
3989 the register. */
3990 if (new_offset == const0_rtx
3991 && validate_change (object, loc, new, 0))
3992 return 1;
3994 /* Next try to replace the register and new offset.
3995 There are two changes to validate here and we can't assume that
3996 when the old offset equals the new one, just changing the register
3997 will yield a valid insn. In the interests of a little efficiency,
3998 however, we only call validate change once (we don't queue up the
3999 changes and then call apply_change_group). */
4001 old = XEXP (x, 0);
4002 if (offset == 0
4003 ? ! validate_change (object, &XEXP (x, 0), new, 0)
4004 : (XEXP (x, 0) = new,
4005 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
4007 if (! extra_insns)
4009 XEXP (x, 0) = old;
4010 return 0;
4013 /* Otherwise copy the new constant into a register and replace
4014 constant with that register. */
4015 temp = gen_reg_rtx (Pmode);
4016 XEXP (x, 0) = new;
4017 if (validate_change (object, &XEXP (x, 1), temp, 0))
4018 emit_insn_before (gen_move_insn (temp, new_offset), object);
4019 else
4021 /* If that didn't work, replace this expression with a
4022 register containing the sum. */
4024 XEXP (x, 0) = old;
4025 new = gen_rtx_PLUS (Pmode, new, new_offset);
4027 start_sequence ();
4028 temp = force_operand (new, NULL_RTX);
4029 seq = get_insns ();
4030 end_sequence ();
4032 emit_insn_before (seq, object);
4033 if (! validate_change (object, loc, temp, 0)
4034 && ! validate_replace_rtx (x, temp, object))
4036 instantiate_virtual_regs_lossage (object);
4037 return 1;
4042 return 1;
4045 /* Fall through to generic two-operand expression case. */
4046 case EXPR_LIST:
4047 case CALL:
4048 case COMPARE:
4049 case MINUS:
4050 case MULT:
4051 case DIV: case UDIV:
4052 case MOD: case UMOD:
4053 case AND: case IOR: case XOR:
4054 case ROTATERT: case ROTATE:
4055 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
4056 case NE: case EQ:
4057 case GE: case GT: case GEU: case GTU:
4058 case LE: case LT: case LEU: case LTU:
4059 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
4060 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
4061 loc = &XEXP (x, 0);
4062 goto restart;
4064 case MEM:
4065 /* Most cases of MEM that convert to valid addresses have already been
4066 handled by our scan of decls. The only special handling we
4067 need here is to make a copy of the rtx to ensure it isn't being
4068 shared if we have to change it to a pseudo.
4070 If the rtx is a simple reference to an address via a virtual register,
4071 it can potentially be shared. In such cases, first try to make it
4072 a valid address, which can also be shared. Otherwise, copy it and
4073 proceed normally.
4075 First check for common cases that need no processing. These are
4076 usually due to instantiation already being done on a previous instance
4077 of a shared rtx. */
4079 temp = XEXP (x, 0);
4080 if (CONSTANT_ADDRESS_P (temp)
4081 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4082 || temp == arg_pointer_rtx
4083 #endif
4084 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4085 || temp == hard_frame_pointer_rtx
4086 #endif
4087 || temp == frame_pointer_rtx)
4088 return 1;
4090 if (GET_CODE (temp) == PLUS
4091 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4092 && (XEXP (temp, 0) == frame_pointer_rtx
4093 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4094 || XEXP (temp, 0) == hard_frame_pointer_rtx
4095 #endif
4096 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4097 || XEXP (temp, 0) == arg_pointer_rtx
4098 #endif
4100 return 1;
4102 if (temp == virtual_stack_vars_rtx
4103 || temp == virtual_incoming_args_rtx
4104 || (GET_CODE (temp) == PLUS
4105 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4106 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4107 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4109 /* This MEM may be shared. If the substitution can be done without
4110 the need to generate new pseudos, we want to do it in place
4111 so all copies of the shared rtx benefit. The call below will
4112 only make substitutions if the resulting address is still
4113 valid.
4115 Note that we cannot pass X as the object in the recursive call
4116 since the insn being processed may not allow all valid
4117 addresses. However, if we were not passed an object, we can
4118 only modify X without copying it if X will have a valid
4119 address.
4121 ??? Also note that this can still lose if OBJECT is an insn that
4122 has fewer restrictions on an address than some other insn.
4123 In that case, we will modify the shared address. This case
4124 doesn't seem very likely, though. One case where this could
4125 happen is in the case of a USE or CLOBBER reference, but we
4126 take care of that below. */
4128 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4129 object ? object : x, 0))
4130 return 1;
4132 /* Otherwise make a copy and process that copy. We copy the entire
4133 RTL expression since it might be a PLUS which could also be
4134 shared. */
4135 *loc = x = copy_rtx (x);
4138 /* Fall through to generic unary operation case. */
4139 case PREFETCH:
4140 case SUBREG:
4141 case STRICT_LOW_PART:
4142 case NEG: case NOT:
4143 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4144 case SIGN_EXTEND: case ZERO_EXTEND:
4145 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4146 case FLOAT: case FIX:
4147 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4148 case ABS:
4149 case SQRT:
4150 case FFS:
4151 case CLZ: case CTZ:
4152 case POPCOUNT: case PARITY:
4153 /* These cases either have just one operand or we know that we need not
4154 check the rest of the operands. */
4155 loc = &XEXP (x, 0);
4156 goto restart;
4158 case USE:
4159 case CLOBBER:
4160 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4161 go ahead and make the invalid one, but do it to a copy. For a REG,
4162 just make the recursive call, since there's no chance of a problem. */
4164 if ((GET_CODE (XEXP (x, 0)) == MEM
4165 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4166 0))
4167 || (GET_CODE (XEXP (x, 0)) == REG
4168 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4169 return 1;
4171 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4172 loc = &XEXP (x, 0);
4173 goto restart;
4175 case REG:
4176 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4177 in front of this insn and substitute the temporary. */
4178 if ((new = instantiate_new_reg (x, &offset)) != 0)
4180 temp = plus_constant (new, offset);
4181 if (!validate_change (object, loc, temp, 0))
4183 if (! extra_insns)
4184 return 0;
4186 start_sequence ();
4187 temp = force_operand (temp, NULL_RTX);
4188 seq = get_insns ();
4189 end_sequence ();
4191 emit_insn_before (seq, object);
4192 if (! validate_change (object, loc, temp, 0)
4193 && ! validate_replace_rtx (x, temp, object))
4194 instantiate_virtual_regs_lossage (object);
4198 return 1;
4200 case ADDRESSOF:
4201 if (GET_CODE (XEXP (x, 0)) == REG)
4202 return 1;
4204 else if (GET_CODE (XEXP (x, 0)) == MEM)
4206 /* If we have a (addressof (mem ..)), do any instantiation inside
4207 since we know we'll be making the inside valid when we finally
4208 remove the ADDRESSOF. */
4209 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4210 return 1;
4212 break;
4214 default:
4215 break;
4218 /* Scan all subexpressions. */
4219 fmt = GET_RTX_FORMAT (code);
4220 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4221 if (*fmt == 'e')
4223 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4224 return 0;
4226 else if (*fmt == 'E')
4227 for (j = 0; j < XVECLEN (x, i); j++)
4228 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4229 extra_insns))
4230 return 0;
4232 return 1;
4235 /* Optimization: assuming this function does not receive nonlocal gotos,
4236 delete the handlers for such, as well as the insns to establish
4237 and disestablish them. */
4239 static void
4240 delete_handlers ()
4242 rtx insn;
4243 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4245 /* Delete the handler by turning off the flag that would
4246 prevent jump_optimize from deleting it.
4247 Also permit deletion of the nonlocal labels themselves
4248 if nothing local refers to them. */
4249 if (GET_CODE (insn) == CODE_LABEL)
4251 tree t, last_t;
4253 LABEL_PRESERVE_P (insn) = 0;
4255 /* Remove it from the nonlocal_label list, to avoid confusing
4256 flow. */
4257 for (t = nonlocal_labels, last_t = 0; t;
4258 last_t = t, t = TREE_CHAIN (t))
4259 if (DECL_RTL (TREE_VALUE (t)) == insn)
4260 break;
4261 if (t)
4263 if (! last_t)
4264 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4265 else
4266 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4269 if (GET_CODE (insn) == INSN)
4271 int can_delete = 0;
4272 rtx t;
4273 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4274 if (reg_mentioned_p (t, PATTERN (insn)))
4276 can_delete = 1;
4277 break;
4279 if (can_delete
4280 || (nonlocal_goto_stack_level != 0
4281 && reg_mentioned_p (nonlocal_goto_stack_level,
4282 PATTERN (insn))))
4283 delete_related_insns (insn);
4288 /* Return the first insn following those generated by `assign_parms'. */
4290 rtx
4291 get_first_nonparm_insn ()
4293 if (last_parm_insn)
4294 return NEXT_INSN (last_parm_insn);
4295 return get_insns ();
4298 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4299 This means a type for which function calls must pass an address to the
4300 function or get an address back from the function.
4301 EXP may be a type node or an expression (whose type is tested). */
4303 int
4304 aggregate_value_p (exp)
4305 tree exp;
4307 int i, regno, nregs;
4308 rtx reg;
4310 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4312 if (TREE_CODE (type) == VOID_TYPE)
4313 return 0;
4314 if (RETURN_IN_MEMORY (type))
4315 return 1;
4316 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4317 and thus can't be returned in registers. */
4318 if (TREE_ADDRESSABLE (type))
4319 return 1;
4320 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4321 return 1;
4322 /* Make sure we have suitable call-clobbered regs to return
4323 the value in; if not, we must return it in memory. */
4324 reg = hard_function_value (type, 0, 0);
4326 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4327 it is OK. */
4328 if (GET_CODE (reg) != REG)
4329 return 0;
4331 regno = REGNO (reg);
4332 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4333 for (i = 0; i < nregs; i++)
4334 if (! call_used_regs[regno + i])
4335 return 1;
4336 return 0;
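/* For instance, with -fpcc-struct-return every aggregate is returned
   in memory; a type with a required address (TREE_ADDRESSABLE, as a
   C++ class with a nontrivial copy constructor typically is) likewise
   goes in memory; a plain `int' normally fails all of the tests above
   and is returned in a register.  */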
4339 /* Assign RTL expressions to the function's parameters.
4340 This may involve copying them into registers and using
4341 those registers as the RTL for them. */
4343 void
4344 assign_parms (fndecl)
4345 tree fndecl;
4347 tree parm;
4348 CUMULATIVE_ARGS args_so_far;
4349 /* Total space needed so far for args on the stack,
4350 given as a constant and a tree-expression. */
4351 struct args_size stack_args_size;
4352 tree fntype = TREE_TYPE (fndecl);
4353 tree fnargs = DECL_ARGUMENTS (fndecl), orig_fnargs;
4354 /* This is used for the arg pointer when referring to stack args. */
4355 rtx internal_arg_pointer;
4356 /* This is a dummy PARM_DECL that we use for the function result if
4357 the function returns a structure. */
4358 tree function_result_decl = 0;
4359 #ifdef SETUP_INCOMING_VARARGS
4360 int varargs_setup = 0;
4361 #endif
4362 int reg_parm_stack_space = 0;
4363 rtx conversion_insns = 0;
4365 /* Nonzero if function takes extra anonymous args.
4366 This means the last named arg must be on the stack
4367 right before the anonymous ones. */
4368 int stdarg
4369 = (TYPE_ARG_TYPES (fntype) != 0
4370 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4371 != void_type_node));
4373 current_function_stdarg = stdarg;
4375 /* If the reg that the virtual arg pointer will be translated into is
4376 not a fixed reg or is the stack pointer, make a copy of the virtual
4377 arg pointer, and address parms via the copy. The frame pointer is
4378 considered fixed even though it is not marked as such.
4380 The second time through, simply use ap to avoid generating rtx. */
4382 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4383 || ! (fixed_regs[ARG_POINTER_REGNUM]
4384 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4385 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4386 else
4387 internal_arg_pointer = virtual_incoming_args_rtx;
4388 current_function_internal_arg_pointer = internal_arg_pointer;
4390 stack_args_size.constant = 0;
4391 stack_args_size.var = 0;
4393 /* If struct value address is treated as the first argument, make it so. */
4394 if (aggregate_value_p (DECL_RESULT (fndecl))
4395 && ! current_function_returns_pcc_struct
4396 && struct_value_incoming_rtx == 0)
4398 tree type = build_pointer_type (TREE_TYPE (fntype));
4400 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4402 DECL_ARG_TYPE (function_result_decl) = type;
4403 TREE_CHAIN (function_result_decl) = fnargs;
4404 fnargs = function_result_decl;
4407 orig_fnargs = fnargs;
4409 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4410 parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4412 if (SPLIT_COMPLEX_ARGS)
4413 fnargs = split_complex_args (fnargs);
4415 #ifdef REG_PARM_STACK_SPACE
4416 #ifdef MAYBE_REG_PARM_STACK_SPACE
4417 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4418 #else
4419 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4420 #endif
4421 #endif
4423 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4424 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4425 #else
4426 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl);
4427 #endif
4429 /* We haven't yet found an argument that we must push and pretend the
4430 caller did. */
4431 current_function_pretend_args_size = 0;
4433 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4435 rtx entry_parm;
4436 rtx stack_parm;
4437 enum machine_mode promoted_mode, passed_mode;
4438 enum machine_mode nominal_mode, promoted_nominal_mode;
4439 int unsignedp;
4440 struct locate_and_pad_arg_data locate;
4441 int passed_pointer = 0;
4442 int did_conversion = 0;
4443 tree passed_type = DECL_ARG_TYPE (parm);
4444 tree nominal_type = TREE_TYPE (parm);
4445 int last_named = 0, named_arg;
4446 int in_regs;
4447 int partial = 0;
4449 /* Set LAST_NAMED if this is the last named arg before the
4450 anonymous args. */
4451 if (stdarg)
4453 tree tem;
4455 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4456 if (DECL_NAME (tem))
4457 break;
4459 if (tem == 0)
4460 last_named = 1;
4462 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4463 most machines, if this is a varargs/stdarg function, then we treat
4464 the last named arg as if it were anonymous too. */
4465 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4467 if (TREE_TYPE (parm) == error_mark_node
4468 /* This can happen after weird syntax errors
4469 or if an enum type is defined among the parms. */
4470 || TREE_CODE (parm) != PARM_DECL
4471 || passed_type == NULL)
4473 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4474 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4475 TREE_USED (parm) = 1;
4476 continue;
4479 /* Find mode of arg as it is passed, and mode of arg
4480 as it should be during execution of this function. */
4481 passed_mode = TYPE_MODE (passed_type);
4482 nominal_mode = TYPE_MODE (nominal_type);
4484 /* If the parm's mode is VOID, its value doesn't matter,
4485 so avoid the usual things like emit_move_insn that could crash. */
4486 if (nominal_mode == VOIDmode)
4488 SET_DECL_RTL (parm, const0_rtx);
4489 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4490 continue;
4493 /* If the parm is to be passed as a transparent union, use the
4494 type of the first field for the tests below. We have already
4495 verified that the modes are the same. */
4496 if (DECL_TRANSPARENT_UNION (parm)
4497 || (TREE_CODE (passed_type) == UNION_TYPE
4498 && TYPE_TRANSPARENT_UNION (passed_type)))
4499 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4501 /* See if this arg was passed by invisible reference. It is if
4502 it is an object whose size depends on the contents of the
4503 object itself or if the machine requires these objects be passed
4504 that way. */
4506 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (passed_type))
4507 || TREE_ADDRESSABLE (passed_type)
4508 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4509 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4510 passed_type, named_arg)
4511 #endif
4514 passed_type = nominal_type = build_pointer_type (passed_type);
4515 passed_pointer = 1;
4516 passed_mode = nominal_mode = Pmode;
4518 /* See if the frontend wants to pass this by invisible reference. */
4519 else if (passed_type != nominal_type
4520 && POINTER_TYPE_P (passed_type)
4521 && TREE_TYPE (passed_type) == nominal_type)
4523 nominal_type = passed_type;
4524 passed_pointer = 1;
4525 passed_mode = nominal_mode = Pmode;
4528 promoted_mode = passed_mode;
4530 #ifdef PROMOTE_FUNCTION_ARGS
4531 /* Compute the mode in which the arg is actually extended to. */
4532 unsignedp = TREE_UNSIGNED (passed_type);
4533 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4534 #endif
4536 /* Let machine desc say which reg (if any) the parm arrives in.
4537 0 means it arrives on the stack. */
4538 #ifdef FUNCTION_INCOMING_ARG
4539 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4540 passed_type, named_arg);
4541 #else
4542 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4543 passed_type, named_arg);
4544 #endif
4546 if (entry_parm == 0)
4547 promoted_mode = passed_mode;
4549 #ifdef SETUP_INCOMING_VARARGS
4550 /* If this is the last named parameter, do any required setup for
4551 varargs or stdargs. We need to know about the case of this being an
4552 addressable type, in which case we skip the registers it
4553 would have arrived in.
4555 For stdargs, LAST_NAMED will be set for two parameters, the one that
4556 is actually the last named, and the dummy parameter. We only
4557 want to do this action once.
4559 Also, indicate when RTL generation is to be suppressed. */
4560 if (last_named && !varargs_setup)
4562 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4563 current_function_pretend_args_size, 0);
4564 varargs_setup = 1;
4566 #endif
4568 /* Determine parm's home in the stack,
4569 in case it arrives in the stack or we should pretend it did.
4571 Compute the stack position and rtx where the argument arrives
4572 and its size.
4574 There is one complexity here: If this was a parameter that would
4575 have been passed in registers, but wasn't only because it is
4576 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4577 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4578 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4579 0 as it was the previous time. */
4580 in_regs = entry_parm != 0;
4581 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4582 in_regs = 1;
4583 #endif
4584 if (!in_regs && !named_arg)
4586 int pretend_named = PRETEND_OUTGOING_VARARGS_NAMED;
4587 if (pretend_named)
4589 #ifdef FUNCTION_INCOMING_ARG
4590 in_regs = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4591 passed_type,
4592 pretend_named) != 0;
4593 #else
4594 in_regs = FUNCTION_ARG (args_so_far, promoted_mode,
4595 passed_type,
4596 pretend_named) != 0;
4597 #endif
4601 /* If this parameter was passed both in registers and in the stack,
4602 use the copy on the stack. */
4603 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4604 entry_parm = 0;
4606 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4607 if (entry_parm)
4608 partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4609 passed_type, named_arg);
4610 #endif
4612 memset (&locate, 0, sizeof (locate));
4613 locate_and_pad_parm (promoted_mode, passed_type, in_regs,
4614 entry_parm ? partial : 0, fndecl,
4615 &stack_args_size, &locate);
4618 rtx offset_rtx;
4620 /* If we're passing this arg using a reg, make its stack home
4621 the aligned stack slot. */
4622 if (entry_parm)
4623 offset_rtx = ARGS_SIZE_RTX (locate.slot_offset);
4624 else
4625 offset_rtx = ARGS_SIZE_RTX (locate.offset);
4627 if (offset_rtx == const0_rtx)
4628 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4629 else
4630 stack_parm = gen_rtx_MEM (promoted_mode,
4631 gen_rtx_PLUS (Pmode,
4632 internal_arg_pointer,
4633 offset_rtx));
4635 set_mem_attributes (stack_parm, parm, 1);
4637 /* Also set REG_ATTRS if the parameter was passed in a register. */
4638 if (entry_parm)
4639 set_reg_attrs_for_parm (entry_parm, stack_parm);
4642 /* If this parm was passed part in regs and part in memory,
4643 pretend it arrived entirely in memory
4644 by pushing the register-part onto the stack.
4646 In the special case of a DImode or DFmode that is split,
4647 we could put it together in a pseudoreg directly,
4648 but for now that's not worth bothering with. */
4650 if (partial)
4652 #ifndef MAYBE_REG_PARM_STACK_SPACE
4653 /* When REG_PARM_STACK_SPACE is nonzero, stack space for
4654 split parameters was allocated by our caller, so we
4655 won't be pushing it in the prolog. */
4656 if (reg_parm_stack_space == 0)
4657 #endif
4658 current_function_pretend_args_size
4659 = (((partial * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4660 / (PARM_BOUNDARY / BITS_PER_UNIT)
4661 * (PARM_BOUNDARY / BITS_PER_UNIT));
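/* A worked example of the rounding above, assuming UNITS_PER_WORD == 4
   and PARM_BOUNDARY == 64: if one register of a DImode argument arrived
   in a register (partial == 1), the 4 bytes pushed back are rounded up
   to ((4 + 8 - 1) / 8) * 8 == 8 bytes of pretended argument space.  */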
4663 /* Handle calls that pass values in multiple non-contiguous
4664 locations. The Irix 6 ABI has examples of this. */
4665 if (GET_CODE (entry_parm) == PARALLEL)
4666 emit_group_store (validize_mem (stack_parm), entry_parm,
4667 int_size_in_bytes (TREE_TYPE (parm)));
4669 else
4670 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
4671 partial);
4673 entry_parm = stack_parm;
4676 /* If we didn't decide this parm came in a register,
4677 by default it came on the stack. */
4678 if (entry_parm == 0)
4679 entry_parm = stack_parm;
4681 /* Record permanently how this parm was passed. */
4682 DECL_INCOMING_RTL (parm) = entry_parm;
4684 /* If there is actually space on the stack for this parm,
4685 count it in stack_args_size; otherwise set stack_parm to 0
4686 to indicate there is no preallocated stack slot for the parm. */
4688 if (entry_parm == stack_parm
4689 || (GET_CODE (entry_parm) == PARALLEL
4690 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4691 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4692 /* On some machines, even if a parm value arrives in a register
4693 there is still an (uninitialized) stack slot allocated for it.
4695 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4696 whether this parameter already has a stack slot allocated,
4697 because an arg block exists only if current_function_args_size
4698 is larger than some threshold, and we haven't calculated that
4699 yet. So, for now, we just assume that stack slots never exist
4700 in this case. */
4701 || REG_PARM_STACK_SPACE (fndecl) > 0
4702 #endif
4705 stack_args_size.constant += locate.size.constant;
4706 /* locate.size doesn't include the part in regs. */
4707 if (partial)
4708 stack_args_size.constant += current_function_pretend_args_size;
4709 if (locate.size.var)
4710 ADD_PARM_SIZE (stack_args_size, locate.size.var);
4712 else
4713 /* No stack slot was pushed for this parm. */
4714 stack_parm = 0;
4716 /* Update info on where next arg arrives in registers. */
4718 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4719 passed_type, named_arg);
4721 /* If we can't trust the parm stack slot to be aligned enough
4722 for its ultimate type, don't use that slot after entry.
4723 We'll make another stack slot, if we need one. */
4725 unsigned int thisparm_boundary
4726 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4728 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4729 stack_parm = 0;
4732 /* If parm was passed in memory, and we need to convert it on entry,
4733 don't store it back in that same slot. */
4734 if (entry_parm == stack_parm
4735 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4736 stack_parm = 0;
4738 /* When an argument is passed in multiple locations, we can't
4739 make use of this information, but we can save some copying if
4740 the whole argument is passed in a single register. */
4741 if (GET_CODE (entry_parm) == PARALLEL
4742 && nominal_mode != BLKmode && passed_mode != BLKmode)
4744 int i, len = XVECLEN (entry_parm, 0);
4746 for (i = 0; i < len; i++)
4747 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4748 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4749 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4750 == passed_mode)
4751 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4753 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4754 DECL_INCOMING_RTL (parm) = entry_parm;
4755 break;
4759 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4760 in the mode in which it arrives.
4761 STACK_PARM is an RTX for a stack slot where the parameter can live
4762 during the function (in case we want to put it there).
4763 STACK_PARM is 0 if no stack slot was pushed for it.
4765 Now output code if necessary to convert ENTRY_PARM to
4766 the type in which this function declares it,
4767 and store that result in an appropriate place,
4768 which may be a pseudo reg, may be STACK_PARM,
4769 or may be a local stack slot if STACK_PARM is 0.
4771 Set DECL_RTL to that place. */
4773 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4775 /* If a BLKmode arrives in registers, copy it to a stack slot.
4776 Handle calls that pass values in multiple non-contiguous
4777 locations. The Irix 6 ABI has examples of this. */
4778 if (GET_CODE (entry_parm) == REG
4779 || GET_CODE (entry_parm) == PARALLEL)
4781 int size = int_size_in_bytes (TREE_TYPE (parm));
4782 int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
4783 rtx mem;
4785 /* Note that we will be storing an integral number of words.
4786 So we have to be careful to ensure that we allocate an
4787 integral number of words. We do this below in the
4788 assign_stack_local if space was not allocated in the argument
4789 list. If it was, this will not work if PARM_BOUNDARY is not
4790 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4791 if it becomes a problem. */
4793 if (stack_parm == 0)
4795 stack_parm
4796 = assign_stack_local (GET_MODE (entry_parm),
4797 size_stored, 0);
4798 set_mem_attributes (stack_parm, parm, 1);
4801 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4802 abort ();
4804 mem = validize_mem (stack_parm);
4806 /* Handle calls that pass values in multiple non-contiguous
4807 locations. The Irix 6 ABI has examples of this. */
4808 if (GET_CODE (entry_parm) == PARALLEL)
4809 emit_group_store (mem, entry_parm, size);
4811 else if (size == 0)
4814 /* If SIZE is that of a mode no bigger than a word, just use
4815 that mode's store operation. */
4816 else if (size <= UNITS_PER_WORD)
4818 enum machine_mode mode
4819 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
4821 if (mode != BLKmode)
4823 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
4824 emit_move_insn (change_address (mem, mode, 0), reg);
4827 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
4828 machine must be aligned to the left before storing
4829 to memory. Note that the previous test doesn't
4830 handle all cases (e.g. SIZE == 3). */
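/* For example, with SIZE == 3 and UNITS_PER_WORD == 4, BY below is
   (4 - 3) * 8 == 8, so the three meaningful bytes are shifted into
   the most significant end of the word before being stored.  */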
4831 else if (size != UNITS_PER_WORD
4832 && BYTES_BIG_ENDIAN)
4834 rtx tem, x;
4835 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4836 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
4838 x = expand_binop (word_mode, ashl_optab, reg,
4839 GEN_INT (by), 0, 1, OPTAB_WIDEN);
4840 tem = change_address (mem, word_mode, 0);
4841 emit_move_insn (tem, x);
4843 else
4844 move_block_from_reg (REGNO (entry_parm), mem,
4845 size_stored / UNITS_PER_WORD);
4847 else
4848 move_block_from_reg (REGNO (entry_parm), mem,
4849 size_stored / UNITS_PER_WORD);
4851 SET_DECL_RTL (parm, stack_parm);
4853 else if (! ((! optimize
4854 && ! DECL_REGISTER (parm))
4855 || TREE_SIDE_EFFECTS (parm)
4856 /* If -ffloat-store specified, don't put explicit
4857 float variables into registers. */
4858 || (flag_float_store
4859 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4860 /* Always assign pseudo to structure return or item passed
4861 by invisible reference. */
4862 || passed_pointer || parm == function_result_decl)
4864 /* Store the parm in a pseudoregister during the function, but we
4865 may need to do it in a wider mode. */
4867 rtx parmreg;
4868 unsigned int regno, regnoi = 0, regnor = 0;
4870 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4872 promoted_nominal_mode
4873 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4875 parmreg = gen_reg_rtx (promoted_nominal_mode);
4876 mark_user_reg (parmreg);
4878 /* If this was an item that we received a pointer to, set DECL_RTL
4879 appropriately. */
4880 if (passed_pointer)
4882 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4883 parmreg);
4884 set_mem_attributes (x, parm, 1);
4885 SET_DECL_RTL (parm, x);
4887 else
4889 SET_DECL_RTL (parm, parmreg);
4890 maybe_set_unchanging (DECL_RTL (parm), parm);
4893 /* Copy the value into the register. */
4894 if (nominal_mode != passed_mode
4895 || promoted_nominal_mode != promoted_mode)
4897 int save_tree_used;
4898 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4899 mode, by the caller. We now have to convert it to
4900 NOMINAL_MODE, if different. However, PARMREG may be in
4901 a different mode than NOMINAL_MODE if it is being stored
4902 promoted.
4904 If ENTRY_PARM is a hard register, it might be in a register
4905 not valid for operating in its mode (e.g., an odd-numbered
4906 register for a DFmode). In that case, moves are the only
4907 thing valid, so we can't do a convert from there. This
4908 occurs when the calling sequence allows such misaligned
4909 usage.
4911 In addition, the conversion may involve a call, which could
4912 clobber parameters which haven't been copied to pseudo
4913 registers yet. Therefore, we must first copy the parm to
4914 a pseudo reg here, and save the conversion until after all
4915 parameters have been moved. */
4917 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4919 emit_move_insn (tempreg, validize_mem (entry_parm));
4921 push_to_sequence (conversion_insns);
4922 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4924 if (GET_CODE (tempreg) == SUBREG
4925 && GET_MODE (tempreg) == nominal_mode
4926 && GET_CODE (SUBREG_REG (tempreg)) == REG
4927 && nominal_mode == passed_mode
4928 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4929 && GET_MODE_SIZE (GET_MODE (tempreg))
4930 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4932 /* The argument is already sign/zero extended, so note it
4933 into the subreg. */
4934 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4935 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4938 /* TREE_USED gets set erroneously during expand_assignment. */
4939 save_tree_used = TREE_USED (parm);
4940 expand_assignment (parm,
4941 make_tree (nominal_type, tempreg), 0, 0);
4942 TREE_USED (parm) = save_tree_used;
4943 conversion_insns = get_insns ();
4944 did_conversion = 1;
4945 end_sequence ();
4947 else
4948 emit_move_insn (parmreg, validize_mem (entry_parm));
4950 /* If we were passed a pointer but the actual value
4951 can safely live in a register, put it in one. */
4952 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4953 /* If by-reference argument was promoted, demote it. */
4954 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4955 || ! ((! optimize
4956 && ! DECL_REGISTER (parm))
4957 || TREE_SIDE_EFFECTS (parm)
4958 /* If -ffloat-store specified, don't put explicit
4959 float variables into registers. */
4960 || (flag_float_store
4961 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4963 /* We can't use nominal_mode, because it will have been set to
4964 Pmode above. We must use the actual mode of the parm. */
4965 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4966 mark_user_reg (parmreg);
4967 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4969 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4970 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4971 push_to_sequence (conversion_insns);
4972 emit_move_insn (tempreg, DECL_RTL (parm));
4973 SET_DECL_RTL (parm,
4974 convert_to_mode (GET_MODE (parmreg),
4975 tempreg,
4976 unsigned_p));
4977 emit_move_insn (parmreg, DECL_RTL (parm));
4978 conversion_insns = get_insns();
4979 did_conversion = 1;
4980 end_sequence ();
4982 else
4983 emit_move_insn (parmreg, DECL_RTL (parm));
4984 SET_DECL_RTL (parm, parmreg);
4985 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4986 now the parm. */
4987 stack_parm = 0;
4989 #ifdef FUNCTION_ARG_CALLEE_COPIES
4990 /* If we are passed an arg by reference and it is our responsibility
4991 to make a copy, do it now.
4992 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4993 original argument, so we must recreate them in the call to
4994 FUNCTION_ARG_CALLEE_COPIES. */
4995 /* ??? Later add code to handle the case where the argument isn't
4996 modified, so that the copy can be omitted. */
4998 else if (passed_pointer
4999 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
5000 TYPE_MODE (DECL_ARG_TYPE (parm)),
5001 DECL_ARG_TYPE (parm),
5002 named_arg)
5003 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
5005 rtx copy;
5006 tree type = DECL_ARG_TYPE (parm);
5008 /* This sequence may involve a library call perhaps clobbering
5009 registers that haven't been copied to pseudos yet. */
5011 push_to_sequence (conversion_insns);
5013 if (!COMPLETE_TYPE_P (type)
5014 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5015 /* This is a variable sized object. */
5016 copy = gen_rtx_MEM (BLKmode,
5017 allocate_dynamic_stack_space
5018 (expr_size (parm), NULL_RTX,
5019 TYPE_ALIGN (type)));
5020 else
5021 copy = assign_stack_temp (TYPE_MODE (type),
5022 int_size_in_bytes (type), 1);
5023 set_mem_attributes (copy, parm, 1);
5025 store_expr (parm, copy, 0);
5026 emit_move_insn (parmreg, XEXP (copy, 0));
5027 conversion_insns = get_insns ();
5028 did_conversion = 1;
5029 end_sequence ();
5031 #endif /* FUNCTION_ARG_CALLEE_COPIES */
5033 /* In any case, record the parm's desired stack location
5034 in case we later discover it must live in the stack.
5036 If it is a COMPLEX value, store the stack location for both
5037 halves. */
5039 if (GET_CODE (parmreg) == CONCAT)
5040 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
5041 else
5042 regno = REGNO (parmreg);
5044 if (regno >= max_parm_reg)
5046 rtx *new;
5047 int old_max_parm_reg = max_parm_reg;
5049 /* It's slow to expand this one register at a time,
5050 but it's also rare and we need max_parm_reg to be
5051 precisely correct. */
5052 max_parm_reg = regno + 1;
5053 new = (rtx *) ggc_realloc (parm_reg_stack_loc,
5054 max_parm_reg * sizeof (rtx));
5055 memset ((char *) (new + old_max_parm_reg), 0,
5056 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
5057 parm_reg_stack_loc = new;
5060 if (GET_CODE (parmreg) == CONCAT)
5062 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
5064 regnor = REGNO (gen_realpart (submode, parmreg));
5065 regnoi = REGNO (gen_imagpart (submode, parmreg));
5067 if (stack_parm != 0)
5069 parm_reg_stack_loc[regnor]
5070 = gen_realpart (submode, stack_parm);
5071 parm_reg_stack_loc[regnoi]
5072 = gen_imagpart (submode, stack_parm);
5074 else
5076 parm_reg_stack_loc[regnor] = 0;
5077 parm_reg_stack_loc[regnoi] = 0;
5080 else
5081 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
5083 /* Mark the register as eliminable if we did no conversion
5084 and it was copied from memory at a fixed offset,
5085 and the arg pointer was not copied to a pseudo-reg.
5086 If the arg pointer is a pseudo reg or the offset formed
5087 an invalid address, such memory-equivalences
5088 as we make here would screw up life analysis for it. */
5089 if (nominal_mode == passed_mode
5090 && ! did_conversion
5091 && stack_parm != 0
5092 && GET_CODE (stack_parm) == MEM
5093 && locate.offset.var == 0
5094 && reg_mentioned_p (virtual_incoming_args_rtx,
5095 XEXP (stack_parm, 0)))
5097 rtx linsn = get_last_insn ();
5098 rtx sinsn, set;
5100 /* Mark complex types separately. */
5101 if (GET_CODE (parmreg) == CONCAT)
5102 /* Scan backwards for the set of the real and
5103 imaginary parts. */
5104 for (sinsn = linsn; sinsn != 0;
5105 sinsn = prev_nonnote_insn (sinsn))
5107 set = single_set (sinsn);
5108 if (set != 0
5109 && SET_DEST (set) == regno_reg_rtx [regnoi])
5110 REG_NOTES (sinsn)
5111 = gen_rtx_EXPR_LIST (REG_EQUIV,
5112 parm_reg_stack_loc[regnoi],
5113 REG_NOTES (sinsn));
5114 else if (set != 0
5115 && SET_DEST (set) == regno_reg_rtx [regnor])
5116 REG_NOTES (sinsn)
5117 = gen_rtx_EXPR_LIST (REG_EQUIV,
5118 parm_reg_stack_loc[regnor],
5119 REG_NOTES (sinsn));
5121 else if ((set = single_set (linsn)) != 0
5122 && SET_DEST (set) == parmreg)
5123 REG_NOTES (linsn)
5124 = gen_rtx_EXPR_LIST (REG_EQUIV,
5125 stack_parm, REG_NOTES (linsn));
5128 /* For a pointer data type, suggest a pointer register. */
5129 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5130 mark_reg_pointer (parmreg,
5131 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5133 /* If something wants our address, try to use ADDRESSOF. */
5134 if (TREE_ADDRESSABLE (parm))
5136 /* If we end up putting something into the stack,
5137 fixup_var_refs_insns will need to make a pass over
5138 all the instructions. It looks through the pending
5139 sequences -- but it can't see the ones in the
5140 CONVERSION_INSNS, if they're not on the sequence
5141 stack. So, we go back to that sequence, just so that
5142 the fixups will happen. */
5143 push_to_sequence (conversion_insns);
5144 put_var_into_stack (parm, /*rescan=*/true);
5145 conversion_insns = get_insns ();
5146 end_sequence ();
5149 else
5151 /* Value must be stored in the stack slot STACK_PARM
5152 during function execution. */
5154 if (promoted_mode != nominal_mode)
5156 /* Conversion is required. */
5157 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5159 emit_move_insn (tempreg, validize_mem (entry_parm));
5161 push_to_sequence (conversion_insns);
5162 entry_parm = convert_to_mode (nominal_mode, tempreg,
5163 TREE_UNSIGNED (TREE_TYPE (parm)));
5164 if (stack_parm)
5165 /* ??? This may need a big-endian conversion on sparc64. */
5166 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5168 conversion_insns = get_insns ();
5169 did_conversion = 1;
5170 end_sequence ();
5173 if (entry_parm != stack_parm)
5175 if (stack_parm == 0)
5177 stack_parm
5178 = assign_stack_local (GET_MODE (entry_parm),
5179 GET_MODE_SIZE (GET_MODE (entry_parm)),
5180 0);
5181 set_mem_attributes (stack_parm, parm, 1);
5184 if (promoted_mode != nominal_mode)
5186 push_to_sequence (conversion_insns);
5187 emit_move_insn (validize_mem (stack_parm),
5188 validize_mem (entry_parm));
5189 conversion_insns = get_insns ();
5190 end_sequence ();
5192 else
5193 emit_move_insn (validize_mem (stack_parm),
5194 validize_mem (entry_parm));
5197 SET_DECL_RTL (parm, stack_parm);
5201 if (SPLIT_COMPLEX_ARGS)
5203 parm = orig_fnargs;
5205 for (; parm; parm = TREE_CHAIN (parm))
5207 tree type = TREE_TYPE (parm);
5209 if (TREE_CODE (type) == COMPLEX_TYPE)
5211 SET_DECL_RTL (parm,
5212 gen_rtx_CONCAT (DECL_MODE (parm),
5213 DECL_RTL (fnargs),
5214 DECL_RTL (TREE_CHAIN (fnargs))));
5215 DECL_INCOMING_RTL (parm)
5216 = gen_rtx_CONCAT (DECL_MODE (parm),
5217 DECL_INCOMING_RTL (fnargs),
5218 DECL_INCOMING_RTL (TREE_CHAIN (fnargs)));
5219 fnargs = TREE_CHAIN (fnargs);
5221 else
5223 SET_DECL_RTL (parm, DECL_RTL (fnargs));
5224 DECL_INCOMING_RTL (parm) = DECL_INCOMING_RTL (fnargs);
5226 fnargs = TREE_CHAIN (fnargs);
5230 /* Output all parameter conversion instructions (possibly including calls)
5231 now that all parameters have been copied out of hard registers. */
5232 emit_insn (conversion_insns);
5234 /* If we are receiving a struct value address as the first argument, set up
5235 the RTL for the function result. As this might require code to convert
5236 the transmitted address to Pmode, we do this here to ensure that possible
5237 preliminary conversions of the address have been emitted already. */
5238 if (function_result_decl)
5240 tree result = DECL_RESULT (fndecl);
5241 rtx addr = DECL_RTL (function_result_decl);
5242 rtx x;
5244 #ifdef POINTERS_EXTEND_UNSIGNED
5245 if (GET_MODE (addr) != Pmode)
5246 addr = convert_memory_address (Pmode, addr);
5247 #endif
5249 x = gen_rtx_MEM (DECL_MODE (result), addr);
5250 set_mem_attributes (x, result, 1);
5251 SET_DECL_RTL (result, x);
5254 last_parm_insn = get_last_insn ();
5256 current_function_args_size = stack_args_size.constant;
5258 /* Adjust function incoming argument size for alignment and
5259 minimum length. */
5261 #ifdef REG_PARM_STACK_SPACE
5262 #ifndef MAYBE_REG_PARM_STACK_SPACE
5263 current_function_args_size = MAX (current_function_args_size,
5264 REG_PARM_STACK_SPACE (fndecl));
5265 #endif
5266 #endif
5268 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5270 current_function_args_size
5271 = ((current_function_args_size + STACK_BYTES - 1)
5272 / STACK_BYTES) * STACK_BYTES;
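/* E.g. with STACK_BOUNDARY == 64, STACK_BYTES is 8, so an argument
   block of 20 bytes is rounded up to 24 here.  */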
5274 #ifdef ARGS_GROW_DOWNWARD
5275 current_function_arg_offset_rtx
5276 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5277 : expand_expr (size_diffop (stack_args_size.var,
5278 size_int (-stack_args_size.constant)),
5279 NULL_RTX, VOIDmode, 0));
5280 #else
5281 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5282 #endif
5284 /* See how many bytes, if any, of its args a function should try to pop
5285 on return. */
5287 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5288 current_function_args_size);
5290 /* For a stdarg.h function, save info about
5291 regs and stack space used by the named args. */
5293 current_function_args_info = args_so_far;
5295 /* Set the rtx used for the function return value. Put this in its
5296 own variable so any optimizers that need this information don't have
5297 to include tree.h. Do this here so it gets done when an inlined
5298 function gets output. */
5300 current_function_return_rtx
5301 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5302 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5304 /* If scalar return value was computed in a pseudo-reg, or was a named
5305 return value that got dumped to the stack, copy that to the hard
5306 return register. */
5307 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5309 tree decl_result = DECL_RESULT (fndecl);
5310 rtx decl_rtl = DECL_RTL (decl_result);
5312 if (REG_P (decl_rtl)
5313 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5314 : DECL_REGISTER (decl_result))
5316 rtx real_decl_rtl;
5318 #ifdef FUNCTION_OUTGOING_VALUE
5319 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5320 fndecl);
5321 #else
5322 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5323 fndecl);
5324 #endif
5325 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5326 /* The delay slot scheduler assumes that current_function_return_rtx
5327 holds the hard register containing the return value, not a
5328 temporary pseudo. */
5329 current_function_return_rtx = real_decl_rtl;
5334 static tree
5335 split_complex_args (tree args)
5337 tree p;
5339 args = copy_list (args);
5341 for (p = args; p; p = TREE_CHAIN (p))
5343 tree complex_type = TREE_TYPE (p);
5345 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
5347 tree decl;
5348 tree subtype = TREE_TYPE (complex_type);
5350 /* Rewrite the PARM_DECL's type with its component. */
5351 TREE_TYPE (p) = subtype;
5352 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
5354 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
5355 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
5356 TREE_CHAIN (decl) = TREE_CHAIN (p);
5357 TREE_CHAIN (p) = decl;
5361 return args;
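/* A sketch of the effect, with an illustrative parameter list: the
   arguments (__complex__ double c, int i) become
   (double c_real, double c_imag, int i); each COMPLEX_TYPE PARM_DECL
   is rewritten to its component type and a second PARM_DECL is chained
   in after it.  The SPLIT_COMPLEX_ARGS code at the end of assign_parms
   later rejoins the two halves with a CONCAT.  */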
5364 /* Indicate whether REGNO is an incoming argument to the current function
5365 that was promoted to a wider mode. If so, return the RTX for the
5366 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5367 that REGNO is promoted from and whether the promotion was signed or
5368 unsigned. */
5370 #ifdef PROMOTE_FUNCTION_ARGS
5372 rtx
5373 promoted_input_arg (regno, pmode, punsignedp)
5374 unsigned int regno;
5375 enum machine_mode *pmode;
5376 int *punsignedp;
5378 tree arg;
5380 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5381 arg = TREE_CHAIN (arg))
5382 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5383 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5384 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5386 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5387 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5389 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5390 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5391 && mode != DECL_MODE (arg))
5393 *pmode = DECL_MODE (arg);
5394 *punsignedp = unsignedp;
5395 return DECL_INCOMING_RTL (arg);
5399 return 0;
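/* E.g. if a `short' parameter arrives in a register promoted to SImode
   by PROMOTE_FUNCTION_ARGS, this returns that incoming REG and sets
   *PMODE to HImode, the mode the value was promoted from.  */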
5402 #endif
5404 /* Compute the size and offset from the start of the stacked arguments for a
5405 parm passed in mode PASSED_MODE and with type TYPE.
5407 INITIAL_OFFSET_PTR points to the current offset into the stacked
5408 arguments.
5410 The starting offset and size for this parm are returned in
5411 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
5412 nonzero, the offset is that of the stack slot, which is returned in
5413 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
5414 padding required from the initial offset ptr to the stack slot.
5416 IN_REGS is nonzero if the argument will be passed in registers. It will
5417 never be set if REG_PARM_STACK_SPACE is not defined.
5419 FNDECL is the function in which the argument was defined.
5421 There are two types of rounding that are done. The first, controlled by
5422 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5423 list to be aligned to the specified boundary (in bits). This rounding
5424 affects the initial and starting offsets, but not the argument size.
5426 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5427 optionally rounds the size of the parm to PARM_BOUNDARY. The
5428 initial offset is not affected by this rounding, while the size always
5429 is and the starting offset may be. */
5431 /* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
5432 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
5433 callers pass in the total size of args so far as
5434 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
5436 void
5437 locate_and_pad_parm (passed_mode, type, in_regs, partial, fndecl,
5438 initial_offset_ptr, locate)
5439 enum machine_mode passed_mode;
5440 tree type;
5441 int in_regs;
5442 int partial;
5443 tree fndecl ATTRIBUTE_UNUSED;
5444 struct args_size *initial_offset_ptr;
5445 struct locate_and_pad_arg_data *locate;
5447 tree sizetree;
5448 enum direction where_pad;
5449 int boundary;
5450 int reg_parm_stack_space = 0;
5451 int part_size_in_regs;
5453 #ifdef REG_PARM_STACK_SPACE
5454 #ifdef MAYBE_REG_PARM_STACK_SPACE
5455 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5456 #else
5457 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5458 #endif
5460 /* If we have found a stack parm before we reach the end of the
5461 area reserved for registers, skip that area. */
5462 if (! in_regs)
5464 if (reg_parm_stack_space > 0)
5466 if (initial_offset_ptr->var)
5468 initial_offset_ptr->var
5469 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5470 ssize_int (reg_parm_stack_space));
5471 initial_offset_ptr->constant = 0;
5473 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5474 initial_offset_ptr->constant = reg_parm_stack_space;
5477 #endif /* REG_PARM_STACK_SPACE */
5479 part_size_in_regs = 0;
5480 if (reg_parm_stack_space == 0)
5481 part_size_in_regs = ((partial * UNITS_PER_WORD)
5482 / (PARM_BOUNDARY / BITS_PER_UNIT)
5483 * (PARM_BOUNDARY / BITS_PER_UNIT));
5485 sizetree
5486 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5487 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5488 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5490 #ifdef ARGS_GROW_DOWNWARD
5491 locate->slot_offset.constant = -initial_offset_ptr->constant;
5492 if (initial_offset_ptr->var)
5493 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
5494 initial_offset_ptr->var);
5497 tree s2 = sizetree;
5498 if (where_pad != none
5499 && (!host_integerp (sizetree, 1)
5500 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5501 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
5502 SUB_PARM_SIZE (locate->slot_offset, s2);
5505 locate->slot_offset.constant += part_size_in_regs;
5507 if (!in_regs
5508 #ifdef REG_PARM_STACK_SPACE
5509 || REG_PARM_STACK_SPACE (fndecl) > 0
5510 #endif
5511 )
5512 pad_to_arg_alignment (&locate->slot_offset, boundary,
5513 &locate->alignment_pad);
5515 locate->size.constant = (-initial_offset_ptr->constant
5516 - locate->slot_offset.constant);
5517 if (initial_offset_ptr->var)
5518 locate->size.var = size_binop (MINUS_EXPR,
5519 size_binop (MINUS_EXPR,
5520 ssize_int (0),
5521 initial_offset_ptr->var),
5522 locate->slot_offset.var);
5524 /* Pad_below needs the pre-rounded size to know how much to pad
5525 below. */
5526 locate->offset = locate->slot_offset;
5527 if (where_pad == downward)
5528 pad_below (&locate->offset, passed_mode, sizetree);
5530 #else /* !ARGS_GROW_DOWNWARD */
5531 if (!in_regs
5532 #ifdef REG_PARM_STACK_SPACE
5533 || REG_PARM_STACK_SPACE (fndecl) > 0
5534 #endif
5535 )
5536 pad_to_arg_alignment (initial_offset_ptr, boundary,
5537 &locate->alignment_pad);
5538 locate->slot_offset = *initial_offset_ptr;
5540 #ifdef PUSH_ROUNDING
5541 if (passed_mode != BLKmode)
5542 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5543 #endif
5545 /* Pad_below needs the pre-rounded size to know how much to pad below
5546 so this must be done before rounding up. */
5547 locate->offset = locate->slot_offset;
5548 if (where_pad == downward)
5549 pad_below (&locate->offset, passed_mode, sizetree);
5551 if (where_pad != none
5552 && (!host_integerp (sizetree, 1)
5553 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5554 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5556 ADD_PARM_SIZE (locate->size, sizetree);
5558 locate->size.constant -= part_size_in_regs;
5559 #endif /* ARGS_GROW_DOWNWARD */
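/* Editorial sketch (not part of function.c): the two roundings described
   in the comment before locate_and_pad_parm, on made-up numbers.  Assume
   a hypothetical target where PARM_BOUNDARY is 32 bits and the argument
   at hand requires an 8-byte slot boundary.  Guarded out of compilation.  */
#if 0
#include <assert.h>

#define CEIL_BYTES(v, align) (((v) + (align) - 1) & ~((align) - 1))

int
main (void)
{
  int offset = 4;	/* bytes of arguments laid out so far */
  int size = 5;		/* raw size of the next argument */

  /* First rounding (the FUNCTION_ARG_BOUNDARY step): align the slot
     offset to the 8-byte boundary; the size is not affected.  */
  offset = CEIL_BYTES (offset, 8);
  assert (offset == 8);

  /* Second rounding (the FUNCTION_ARG_PADDING/PARM_BOUNDARY step):
     pad the size to 32 bits = 4 bytes; the offset is not affected.  */
  size = CEIL_BYTES (size, 4);
  assert (size == 8);
  return 0;
}
#endif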
5562 /* Round the stack offset in *OFFSET_PTR up (or down, in the
5563 ARGS_GROW_DOWNWARD case) to a multiple of BOUNDARY. BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5565 static void
5566 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5567 struct args_size *offset_ptr;
5568 int boundary;
5569 struct args_size *alignment_pad;
5571 tree save_var = NULL_TREE;
5572 HOST_WIDE_INT save_constant = 0;
5574 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5576 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5578 save_var = offset_ptr->var;
5579 save_constant = offset_ptr->constant;
5582 alignment_pad->var = NULL_TREE;
5583 alignment_pad->constant = 0;
5585 if (boundary > BITS_PER_UNIT)
5587 if (offset_ptr->var)
5589 offset_ptr->var =
5590 #ifdef ARGS_GROW_DOWNWARD
5591 round_down
5592 #else
5593 round_up
5594 #endif
5595 (ARGS_SIZE_TREE (*offset_ptr),
5596 boundary / BITS_PER_UNIT);
5597 /* ARGS_SIZE_TREE includes constant term. */
5598 offset_ptr->constant = 0;
5599 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5600 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5601 save_var);
5603 else
5605 offset_ptr->constant =
5606 #ifdef ARGS_GROW_DOWNWARD
5607 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5608 #else
5609 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5610 #endif
5611 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5612 alignment_pad->constant = offset_ptr->constant - save_constant;
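/* Editorial sketch (not part of function.c): the power-of-two rounding
   used by pad_to_arg_alignment.  CEIL_ROUND adds ALIGN-1 and masks the
   low bits; FLOOR_ROUND only masks.  Division is avoided, so negative
   offsets (the ARGS_GROW_DOWNWARD case) still round toward the more
   negative multiple, assuming two's complement.  Guarded out of
   compilation.  */
#if 0
#include <assert.h>

#define FLOOR_R(v, a) ((v) & ~((a) - 1))
#define CEIL_R(v, a)  (((v) + (a) - 1) & ~((a) - 1))

int
main (void)
{
  assert (CEIL_R (9, 8) == 16);		/* rounds up... */
  assert (CEIL_R (16, 8) == 16);	/* ...unless already aligned */
  assert (FLOOR_R (-9, 8) == -16);	/* downward-growing args */
  return 0;
}
#endif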
5617 static void
5618 pad_below (offset_ptr, passed_mode, sizetree)
5619 struct args_size *offset_ptr;
5620 enum machine_mode passed_mode;
5621 tree sizetree;
5623 if (passed_mode != BLKmode)
5625 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5626 offset_ptr->constant
5627 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5628 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5629 - GET_MODE_SIZE (passed_mode));
5631 else
5633 if (TREE_CODE (sizetree) != INTEGER_CST
5634 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5636 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5637 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5638 /* Add it in. */
5639 ADD_PARM_SIZE (*offset_ptr, s2);
5640 SUB_PARM_SIZE (*offset_ptr, sizetree);
5645 /* Walk the tree of blocks describing the binding levels within a function
5646 and warn about uninitialized variables.
5647 This is done after flow analysis and before global_alloc
5648 maps the pseudo-regs to hard regs.
5650 void
5651 uninitialized_vars_warning (block)
5652 tree block;
5654 tree decl, sub;
5655 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5657 if (warn_uninitialized
5658 && TREE_CODE (decl) == VAR_DECL
5659 /* These warnings are unreliable for aggregates
5660 because assigning the fields one by one can fail to convince
5661 flow.c that the entire aggregate was initialized.
5662 Unions are troublesome because members may be shorter. */
5663 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5664 && DECL_RTL (decl) != 0
5665 && GET_CODE (DECL_RTL (decl)) == REG
5666 /* Global optimizations can make it difficult to determine if a
5667 particular variable has been initialized. However, a VAR_DECL
5668 with a nonzero DECL_INITIAL had an initializer, so do not
5669 claim it is potentially uninitialized.
5671 We do not care about the actual value in DECL_INITIAL, so we do
5672 not worry that it may be a dangling pointer. */
5673 && DECL_INITIAL (decl) == NULL_TREE
5674 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5675 warning_with_decl (decl,
5676 "`%s' might be used uninitialized in this function");
5677 if (extra_warnings
5678 && TREE_CODE (decl) == VAR_DECL
5679 && DECL_RTL (decl) != 0
5680 && GET_CODE (DECL_RTL (decl)) == REG
5681 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5682 warning_with_decl (decl,
5683 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5685 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5686 uninitialized_vars_warning (sub);
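/* Editorial example (not part of function.c): code that exercises the
   warning above.  With -O -Wuninitialized (flow analysis must run),
   the scalar X is flagged when FLAG can be zero, while the aggregate P,
   initialized field by field, may escape detection exactly as the
   comment explains.  Guarded out of compilation.  */
#if 0
struct pair { int a, b; };

int
maybe_uninit (int flag)
{
  int x;		/* scalar: "might be used uninitialized" */
  struct pair p;	/* aggregate: the warning is unreliable */

  if (flag)
    x = 1;
  p.a = 0;		/* partial init can fail to convince flow.c */
  return x + p.b;
}
#endif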
5689 /* Do the appropriate part of uninitialized_vars_warning
5690 but for arguments instead of local variables. */
5692 void
5693 setjmp_args_warning ()
5695 tree decl;
5696 for (decl = DECL_ARGUMENTS (current_function_decl);
5697 decl; decl = TREE_CHAIN (decl))
5698 if (DECL_RTL (decl) != 0
5699 && GET_CODE (DECL_RTL (decl)) == REG
5700 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5701 warning_with_decl (decl,
5702 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5705 /* If this function calls setjmp, put all vars into the stack
5706 unless they were declared `register'. */
5708 void
5709 setjmp_protect (block)
5710 tree block;
5712 tree decl, sub;
5713 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5714 if ((TREE_CODE (decl) == VAR_DECL
5715 || TREE_CODE (decl) == PARM_DECL)
5716 && DECL_RTL (decl) != 0
5717 && (GET_CODE (DECL_RTL (decl)) == REG
5718 || (GET_CODE (DECL_RTL (decl)) == MEM
5719 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5720 /* If this variable came from an inline function, it must be
5721 that its life doesn't overlap the setjmp. If there was a
5722 setjmp in the function, it would already be in memory. We
5723 must exclude such variables because their DECL_RTL might be
5724 set to strange things such as virtual_stack_vars_rtx. */
5725 && ! DECL_FROM_INLINE (decl)
5726 && (
5727 #ifdef NON_SAVING_SETJMP
5728 /* If longjmp doesn't restore the registers,
5729 don't put anything in them. */
5730 NON_SAVING_SETJMP
5731 ||
5732 #endif
5733 ! DECL_REGISTER (decl)))
5734 put_var_into_stack (decl, /*rescan=*/true);
5735 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5736 setjmp_protect (sub);
5739 /* Like the previous function, but for args instead of local variables. */
5741 void
5742 setjmp_protect_args ()
5744 tree decl;
5745 for (decl = DECL_ARGUMENTS (current_function_decl);
5746 decl; decl = TREE_CHAIN (decl))
5747 if ((TREE_CODE (decl) == VAR_DECL
5748 || TREE_CODE (decl) == PARM_DECL)
5749 && DECL_RTL (decl) != 0
5750 && (GET_CODE (DECL_RTL (decl)) == REG
5751 || (GET_CODE (DECL_RTL (decl)) == MEM
5752 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5753 && (
5754 /* If longjmp doesn't restore the registers,
5755 don't put anything in them. */
5756 #ifdef NON_SAVING_SETJMP
5757 NON_SAVING_SETJMP
5758 ||
5759 #endif
5760 ! DECL_REGISTER (decl)))
5761 put_var_into_stack (decl, /*rescan=*/true);
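/* Editorial example (not part of function.c): why the two functions
   above force variables into the stack.  C guarantees nothing about a
   non-volatile automatic that is modified between setjmp and longjmp;
   if N stayed in a register, longjmp could resurrect the register's
   old contents.  Keeping N in memory (what put_var_into_stack arranges)
   makes the value dependable.  Guarded out of compilation.  */
#if 0
#include <setjmp.h>
#include <stdio.h>

static jmp_buf env;

static void
bounce (void)
{
  longjmp (env, 1);
}

int
main (void)
{
  int n = 0;		/* must not live in a call-saved register */

  if (setjmp (env) == 0)
    {
      n = 42;		/* modified after setjmp... */
      bounce ();	/* ...and then we jump back */
    }
  printf ("%d\n", n);	/* dependable only if N lives in memory */
  return 0;
}
#endif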
5764 /* Return the context-pointer register corresponding to DECL,
5765 or 0 if it does not need one. */
5767 rtx
5768 lookup_static_chain (decl)
5769 tree decl;
5771 tree context = decl_function_context (decl);
5772 tree link;
5774 if (context == 0
5775 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5776 return 0;
5778 /* We treat inline_function_decl as an alias for the current function
5779 because that is the inline function whose vars, types, etc.
5780 are being merged into the current function.
5781 See expand_inline_function. */
5782 if (context == current_function_decl || context == inline_function_decl)
5783 return virtual_stack_vars_rtx;
5785 for (link = context_display; link; link = TREE_CHAIN (link))
5786 if (TREE_PURPOSE (link) == context)
5787 return RTL_EXPR_RTL (TREE_VALUE (link));
5789 abort ();
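/* Editorial example (not part of function.c): the situation
   lookup_static_chain serves.  The GNU C nested function below reads
   its parent's local BASE, so every call to it carries a static chain
   pointing at the parent's frame; during RTL expansion that chain is
   the base register this function returns.  Guarded out of
   compilation.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int base = 10;

  int add_base (int n)	/* GNU C extension: nested function */
  {
    return base + n;	/* BASE is reached through the static chain */
  }

  printf ("%d\n", add_base (5));	/* prints 15 */
  return 0;
}
#endif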
5792 /* Convert a stack slot address ADDR for variable VAR
5793 (from a containing function)
5794 into an address valid in this function (using a static chain). */
5796 rtx
5797 fix_lexical_addr (addr, var)
5798 rtx addr;
5799 tree var;
5801 rtx basereg;
5802 HOST_WIDE_INT displacement;
5803 tree context = decl_function_context (var);
5804 struct function *fp;
5805 rtx base = 0;
5807 /* If this is the present function, we need not do anything. */
5808 if (context == current_function_decl || context == inline_function_decl)
5809 return addr;
5811 fp = find_function_data (context);
5813 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5814 addr = XEXP (XEXP (addr, 0), 0);
5816 /* Decode given address as base reg plus displacement. */
5817 if (GET_CODE (addr) == REG)
5818 basereg = addr, displacement = 0;
5819 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5820 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5821 else
5822 abort ();
5824 /* We accept vars reached via the containing function's
5825 incoming arg pointer and via its stack variables pointer. */
5826 if (basereg == fp->internal_arg_pointer)
5828 /* If reached via arg pointer, get the arg pointer value
5829 out of that function's stack frame.
5831 There are two cases: If a separate ap is needed, allocate a
5832 slot in the outer function for it and dereference it that way.
5833 This is correct even if the real ap is actually a pseudo.
5834 Otherwise, just adjust the offset from the frame pointer to
5835 compensate. */
5837 #ifdef NEED_SEPARATE_AP
5838 rtx addr;
5840 addr = get_arg_pointer_save_area (fp);
5841 addr = fix_lexical_addr (XEXP (addr, 0), var);
5842 addr = memory_address (Pmode, addr);
5844 base = gen_rtx_MEM (Pmode, addr);
5845 set_mem_alias_set (base, get_frame_alias_set ());
5846 base = copy_to_reg (base);
5847 #else
5848 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5849 base = lookup_static_chain (var);
5850 #endif
5853 else if (basereg == virtual_stack_vars_rtx)
5855 /* This is the same code as lookup_static_chain, duplicated here to
5856 avoid an extra call to decl_function_context. */
5857 tree link;
5859 for (link = context_display; link; link = TREE_CHAIN (link))
5860 if (TREE_PURPOSE (link) == context)
5862 base = RTL_EXPR_RTL (TREE_VALUE (link));
5863 break;
5867 if (base == 0)
5868 abort ();
5870 /* Use same offset, relative to appropriate static chain or argument
5871 pointer. */
5872 return plus_constant (base, displacement);
5875 /* Return the address of the trampoline for entering nested fn FUNCTION.
5876 If necessary, allocate a trampoline (in the stack frame)
5877 and emit rtl to initialize its contents (at entry to this function). */
5879 rtx
5880 trampoline_address (function)
5881 tree function;
5883 tree link;
5884 tree rtlexp;
5885 rtx tramp;
5886 struct function *fp;
5887 tree fn_context;
5889 /* Find an existing trampoline and return it. */
5890 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5891 if (TREE_PURPOSE (link) == function)
5892 return
5893 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5895 for (fp = outer_function_chain; fp; fp = fp->outer)
5896 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5897 if (TREE_PURPOSE (link) == function)
5899 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5900 function);
5901 return adjust_trampoline_addr (tramp);
5904 /* None exists; we must make one. */
5906 /* Find the `struct function' for the function containing FUNCTION. */
5907 fp = 0;
5908 fn_context = decl_function_context (function);
5909 if (fn_context != current_function_decl
5910 && fn_context != inline_function_decl)
5911 fp = find_function_data (fn_context);
5913 /* Allocate run-time space for this trampoline. */
5914 /* If rounding is needed, allocate extra space
5915 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5916 #define TRAMPOLINE_REAL_SIZE \
5917 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5918 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5919 fp ? fp : cfun);
5920 /* Record the trampoline for reuse and note it for later initialization
5921 by expand_function_end. */
5922 if (fp != 0)
5924 rtlexp = make_node (RTL_EXPR);
5925 RTL_EXPR_RTL (rtlexp) = tramp;
5926 fp->x_trampoline_list = tree_cons (function, rtlexp,
5927 fp->x_trampoline_list);
5929 else
5931 /* Make the RTL_EXPR node temporary, not momentary, so that the
5932 trampoline_list doesn't become garbage. */
5933 rtlexp = make_node (RTL_EXPR);
5935 RTL_EXPR_RTL (rtlexp) = tramp;
5936 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5939 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5940 return adjust_trampoline_addr (tramp);
5943 /* Given a trampoline address,
5944 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5946 static rtx
5947 round_trampoline_addr (tramp)
5948 rtx tramp;
5950 /* Round address up to desired boundary. */
5951 rtx temp = gen_reg_rtx (Pmode);
5952 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5953 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5955 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5956 temp, 0, OPTAB_LIB_WIDEN);
5957 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5958 temp, 0, OPTAB_LIB_WIDEN);
5960 return tramp;
5963 /* Given a trampoline address, round it, then apply any
5964 platform-specific adjustments so that the result can be used for a
5965 function call. */
5967 static rtx
5968 adjust_trampoline_addr (tramp)
5969 rtx tramp;
5971 tramp = round_trampoline_addr (tramp);
5972 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5973 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5974 #endif
5975 return tramp;
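/* Editorial example (not part of function.c): what makes
   trampoline_address run.  Taking the address of a nested function
   that needs a static chain forces a stack trampoline; the pointer
   handed to qsort below is the rounded, adjusted address the two
   helpers above produce.  Guarded out of compilation.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

int
main (void)
{
  int bias = 100;	/* captured from the enclosing frame */
  int v[3] = { 102, 101, 103 };

  int cmp (const void *a, const void *b)	/* GNU C nested function */
  {
    return (*(const int *) a - bias) - (*(const int *) b - bias);
  }

  qsort (v, 3, sizeof v[0], cmp);	/* &cmp is a trampoline address */
  printf ("%d %d %d\n", v[0], v[1], v[2]);	/* 101 102 103 */
  return 0;
}
#endif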
5978 /* Put all this function's BLOCK nodes, including those that are chained
5979 onto the first block, into a vector in depth-first order.
5980 Also store in each NOTE for the beginning or end of a block
5981 the BLOCK that the note refers to (in NOTE_BLOCK).
5982 The blocks are found in DECL_INITIAL (current_function_decl);
5983 the notes are found in the function's insn chain. */
5985 void
5986 identify_blocks ()
5988 int n_blocks;
5989 tree *block_vector, *last_block_vector;
5990 tree *block_stack;
5991 tree block = DECL_INITIAL (current_function_decl);
5993 if (block == 0)
5994 return;
5996 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5997 depth-first order. */
5998 block_vector = get_block_vector (block, &n_blocks);
5999 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
6001 last_block_vector = identify_blocks_1 (get_insns (),
6002 block_vector + 1,
6003 block_vector + n_blocks,
6004 block_stack);
6006 /* If we didn't use all of the subblocks, we've misplaced block notes. */
6007 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
6008 if (0 && last_block_vector != block_vector + n_blocks)
6009 abort ();
6011 free (block_vector);
6012 free (block_stack);
6015 /* Subroutine of identify_blocks. Do the block substitution on the
6016 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
6018 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
6019 BLOCK_VECTOR is incremented for each block seen. */
6021 static tree *
6022 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
6023 rtx insns;
6024 tree *block_vector;
6025 tree *end_block_vector;
6026 tree *orig_block_stack;
6028 rtx insn;
6029 tree *block_stack = orig_block_stack;
6031 for (insn = insns; insn; insn = NEXT_INSN (insn))
6033 if (GET_CODE (insn) == NOTE)
6035 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
6037 tree b;
6039 /* If there are more block notes than BLOCKs, something
6040 is badly wrong. */
6041 if (block_vector == end_block_vector)
6042 abort ();
6044 b = *block_vector++;
6045 NOTE_BLOCK (insn) = b;
6046 *block_stack++ = b;
6048 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6050 /* If there are more NOTE_INSN_BLOCK_ENDs than
6051 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
6052 if (block_stack == orig_block_stack)
6053 abort ();
6055 NOTE_BLOCK (insn) = *--block_stack;
6058 else if (GET_CODE (insn) == CALL_INSN
6059 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6061 rtx cp = PATTERN (insn);
6063 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
6064 end_block_vector, block_stack);
6065 if (XEXP (cp, 1))
6066 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
6067 end_block_vector, block_stack);
6068 if (XEXP (cp, 2))
6069 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
6070 end_block_vector, block_stack);
6074 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
6075 something is badly wrong. */
6076 if (block_stack != orig_block_stack)
6077 abort ();
6079 return block_vector;
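/* Editorial sketch (not part of function.c): the consistency checks in
   identify_blocks_1 are the classic balanced-bracket walk.  Here '('
   stands for NOTE_INSN_BLOCK_BEG and ')' for NOTE_INSN_BLOCK_END; the
   two failure returns mirror the two abort cases above.  Guarded out
   of compilation.  */
#if 0
#include <assert.h>

static int
blocks_balanced (const char *notes)
{
  int depth = 0;

  for (; *notes; notes++)
    if (*notes == '(')
      depth++;			/* push onto the block stack */
    else if (*notes == ')' && depth-- == 0)
      return 0;			/* more ENDs than BEGs */
  return depth == 0;		/* leftover depth: more BEGs than ENDs */
}

int
main (void)
{
  assert (blocks_balanced ("(()())"));
  assert (! blocks_balanced ("(()"));
  return 0;
}
#endif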
6082 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
6083 and create duplicate blocks. */
6084 /* ??? Need an option to either create block fragments or to create
6085 abstract origin duplicates of a source block. It really depends
6086 on what optimization has been performed. */
6088 void
6089 reorder_blocks ()
6091 tree block = DECL_INITIAL (current_function_decl);
6092 varray_type block_stack;
6094 if (block == NULL_TREE)
6095 return;
6097 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
6099 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6100 reorder_blocks_0 (block);
6102 /* Prune the old trees away, so that they don't get in the way. */
6103 BLOCK_SUBBLOCKS (block) = NULL_TREE;
6104 BLOCK_CHAIN (block) = NULL_TREE;
6106 /* Recreate the block tree from the note nesting. */
6107 reorder_blocks_1 (get_insns (), block, &block_stack);
6108 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
6110 /* Remove deleted blocks from the block fragment chains. */
6111 reorder_fix_fragments (block);
6114 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
6116 static void
6117 reorder_blocks_0 (block)
6118 tree block;
6120 while (block)
6122 TREE_ASM_WRITTEN (block) = 0;
6123 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
6124 block = BLOCK_CHAIN (block);
6128 static void
6129 reorder_blocks_1 (insns, current_block, p_block_stack)
6130 rtx insns;
6131 tree current_block;
6132 varray_type *p_block_stack;
6134 rtx insn;
6136 for (insn = insns; insn; insn = NEXT_INSN (insn))
6138 if (GET_CODE (insn) == NOTE)
6140 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
6142 tree block = NOTE_BLOCK (insn);
6144 /* If we have seen this block before, that means it now
6145 spans multiple address regions. Create a new fragment. */
6146 if (TREE_ASM_WRITTEN (block))
6148 tree new_block = copy_node (block);
6149 tree origin;
6151 origin = (BLOCK_FRAGMENT_ORIGIN (block)
6152 ? BLOCK_FRAGMENT_ORIGIN (block)
6153 : block);
6154 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
6155 BLOCK_FRAGMENT_CHAIN (new_block)
6156 = BLOCK_FRAGMENT_CHAIN (origin);
6157 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
6159 NOTE_BLOCK (insn) = new_block;
6160 block = new_block;
6163 BLOCK_SUBBLOCKS (block) = 0;
6164 TREE_ASM_WRITTEN (block) = 1;
6165 /* When there's only one block for the entire function,
6166 current_block == block and we mustn't do this; it
6167 would cause infinite recursion.
6168 if (block != current_block)
6170 BLOCK_SUPERCONTEXT (block) = current_block;
6171 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
6172 BLOCK_SUBBLOCKS (current_block) = block;
6173 current_block = block;
6175 VARRAY_PUSH_TREE (*p_block_stack, block);
6177 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6179 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
6180 VARRAY_POP (*p_block_stack);
6181 BLOCK_SUBBLOCKS (current_block)
6182 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
6183 current_block = BLOCK_SUPERCONTEXT (current_block);
6186 else if (GET_CODE (insn) == CALL_INSN
6187 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6189 rtx cp = PATTERN (insn);
6190 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
6191 if (XEXP (cp, 1))
6192 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
6193 if (XEXP (cp, 2))
6194 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
6199 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
6200 appears in the block tree, select one of the fragments to become
6201 the new origin block. */
6203 static void
6204 reorder_fix_fragments (block)
6205 tree block;
6207 while (block)
6209 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6210 tree new_origin = NULL_TREE;
6212 if (dup_origin)
6214 if (! TREE_ASM_WRITTEN (dup_origin))
6216 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6218 /* Find the first of the remaining fragments. There must
6219 be at least one -- the current block. */
6220 while (! TREE_ASM_WRITTEN (new_origin))
6221 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6222 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6225 else if (! dup_origin)
6226 new_origin = block;
6228 /* Re-root the rest of the fragments to the new origin. In the
6229 case that DUP_ORIGIN was null, that means BLOCK was the origin
6230 of a chain of fragments and we want to remove those fragments
6231 that didn't make it to the output. */
6232 if (new_origin)
6234 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6235 tree chain = *pp;
6237 while (chain)
6239 if (TREE_ASM_WRITTEN (chain))
6241 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6242 *pp = chain;
6243 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6245 chain = BLOCK_FRAGMENT_CHAIN (chain);
6247 *pp = NULL_TREE;
6250 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6251 block = BLOCK_CHAIN (block);
6255 /* Reverse the order of elements in the chain T of blocks,
6256 and return the new head of the chain (old last element). */
6258 static tree
6259 blocks_nreverse (t)
6260 tree t;
6262 tree prev = 0, decl, next;
6263 for (decl = t; decl; decl = next)
6265 next = BLOCK_CHAIN (decl);
6266 BLOCK_CHAIN (decl) = prev;
6267 prev = decl;
6269 return prev;
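/* Editorial sketch (not part of function.c): blocks_nreverse is the
   standard in-place reversal of a singly linked chain, shown here on a
   plain struct instead of BLOCK_CHAIN.  Guarded out of compilation.  */
#if 0
#include <assert.h>

struct node { int i; struct node *next; };

static struct node *
nreverse (struct node *t)
{
  struct node *prev = 0, *next;

  for (; t; t = next)
    {
      next = t->next;	/* remember the rest of the chain */
      t->next = prev;	/* point this node at what came before */
      prev = t;
    }
  return prev;		/* the old tail is the new head */
}

int
main (void)
{
  struct node c = { 3, 0 }, b = { 2, &c }, a = { 1, &b };
  struct node *head = nreverse (&a);

  assert (head->i == 3 && head->next->i == 2 && head->next->next->i == 1);
  return 0;
}
#endif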
6272 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6273 non-NULL, list them all into VECTOR, in a depth-first preorder
6274 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6275 blocks. */
6277 static int
6278 all_blocks (block, vector)
6279 tree block;
6280 tree *vector;
6282 int n_blocks = 0;
6284 while (block)
6286 TREE_ASM_WRITTEN (block) = 0;
6288 /* Record this block. */
6289 if (vector)
6290 vector[n_blocks] = block;
6292 ++n_blocks;
6294 /* Record the subblocks, and their subblocks... */
6295 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6296 vector ? vector + n_blocks : 0);
6297 block = BLOCK_CHAIN (block);
6300 return n_blocks;
6303 /* Return a vector containing all the blocks rooted at BLOCK. The
6304 number of elements in the vector is stored in N_BLOCKS_P. The
6305 vector is dynamically allocated; it is the caller's responsibility
6306 to call `free' on the pointer returned. */
6308 static tree *
6309 get_block_vector (block, n_blocks_p)
6310 tree block;
6311 int *n_blocks_p;
6313 tree *block_vector;
6315 *n_blocks_p = all_blocks (block, NULL);
6316 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6317 all_blocks (block, block_vector);
6319 return block_vector;
6322 static GTY(()) int next_block_index = 2;
6324 /* Set BLOCK_NUMBER for all the blocks in FN. */
6326 void
6327 number_blocks (fn)
6328 tree fn;
6330 int i;
6331 int n_blocks;
6332 tree *block_vector;
6334 /* For SDB and XCOFF debugging output, we start numbering the blocks
6335 from 1 within each function, rather than keeping a running
6336 count. */
6337 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6338 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6339 next_block_index = 1;
6340 #endif
6342 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6344 /* The top-level BLOCK isn't numbered at all. */
6345 for (i = 1; i < n_blocks; ++i)
6346 /* We number the blocks from two. */
6347 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6349 free (block_vector);
6351 return;
6354 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6356 tree
6357 debug_find_var_in_block_tree (var, block)
6358 tree var;
6359 tree block;
6361 tree t;
6363 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6364 if (t == var)
6365 return block;
6367 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6369 tree ret = debug_find_var_in_block_tree (var, t);
6370 if (ret)
6371 return ret;
6374 return NULL_TREE;
6377 /* Allocate a function structure and reset its contents to the defaults. */
6379 static void
6380 prepare_function_start ()
6382 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6384 init_stmt_for_function ();
6385 init_eh_for_function ();
6387 cse_not_expected = ! optimize;
6389 /* Caller save not needed yet. */
6390 caller_save_needed = 0;
6392 /* No stack slots have been made yet. */
6393 stack_slot_list = 0;
6395 current_function_has_nonlocal_label = 0;
6396 current_function_has_nonlocal_goto = 0;
6398 /* There is no stack slot for handling nonlocal gotos. */
6399 nonlocal_goto_handler_slots = 0;
6400 nonlocal_goto_stack_level = 0;
6402 /* No labels have been declared for nonlocal use. */
6403 nonlocal_labels = 0;
6404 nonlocal_goto_handler_labels = 0;
6406 /* No function calls so far in this function. */
6407 function_call_count = 0;
6409 /* No parm regs have been allocated.
6410 (This is important for output_inline_function.) */
6411 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6413 /* Initialize the RTL mechanism. */
6414 init_emit ();
6416 /* Initialize the queue of pending postincrements and postdecrements,
6417 and some other info in expr.c. */
6418 init_expr ();
6420 /* We haven't done register allocation yet. */
6421 reg_renumber = 0;
6423 init_varasm_status (cfun);
6425 /* Clear out data used for inlining. */
6426 cfun->inlinable = 0;
6427 cfun->original_decl_initial = 0;
6428 cfun->original_arg_vector = 0;
6430 cfun->stack_alignment_needed = STACK_BOUNDARY;
6431 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6433 /* Set if a call to setjmp is seen. */
6434 current_function_calls_setjmp = 0;
6436 /* Set if a call to longjmp is seen. */
6437 current_function_calls_longjmp = 0;
6439 current_function_calls_alloca = 0;
6440 current_function_calls_eh_return = 0;
6441 current_function_calls_constant_p = 0;
6442 current_function_contains_functions = 0;
6443 current_function_is_leaf = 0;
6444 current_function_nothrow = 0;
6445 current_function_sp_is_unchanging = 0;
6446 current_function_uses_only_leaf_regs = 0;
6447 current_function_has_computed_jump = 0;
6448 current_function_is_thunk = 0;
6450 current_function_returns_pcc_struct = 0;
6451 current_function_returns_struct = 0;
6452 current_function_epilogue_delay_list = 0;
6453 current_function_uses_const_pool = 0;
6454 current_function_uses_pic_offset_table = 0;
6455 current_function_cannot_inline = 0;
6457 /* We have not yet needed to make a label to jump to for tail-recursion. */
6458 tail_recursion_label = 0;
6460 /* We haven't had a need to make a save area for ap yet. */
6461 arg_pointer_save_area = 0;
6463 /* No stack slots allocated yet. */
6464 frame_offset = 0;
6466 /* No SAVE_EXPRs in this function yet. */
6467 save_expr_regs = 0;
6469 /* No RTL_EXPRs in this function yet. */
6470 rtl_expr_chain = 0;
6472 /* Set up to allocate temporaries. */
6473 init_temp_slots ();
6475 /* Indicate that we need to distinguish between the return value of the
6476 present function and the return value of a function being called. */
6477 rtx_equal_function_value_matters = 1;
6479 /* Indicate that we have not instantiated virtual registers yet. */
6480 virtuals_instantiated = 0;
6482 /* Indicate that we want CONCATs now. */
6483 generating_concat_p = 1;
6485 /* Indicate we have no need of a frame pointer yet. */
6486 frame_pointer_needed = 0;
6488 /* By default assume not stdarg. */
6489 current_function_stdarg = 0;
6491 /* We haven't made any trampolines for this function yet. */
6492 trampoline_list = 0;
6494 init_pending_stack_adjust ();
6495 inhibit_defer_pop = 0;
6497 current_function_outgoing_args_size = 0;
6499 current_function_funcdef_no = funcdef_no++;
6501 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6503 cfun->max_jumptable_ents = 0;
6505 (*lang_hooks.function.init) (cfun);
6506 if (init_machine_status)
6507 cfun->machine = (*init_machine_status) ();
6510 /* Initialize the rtl expansion mechanism so that we can do simple things
6511 like generate sequences. This is used to provide a context during global
6512 initialization of some passes. */
6513 void
6514 init_dummy_function_start ()
6516 prepare_function_start ();
6519 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6520 and initialize static variables for generating RTL for the statements
6521 of the function. */
6523 void
6524 init_function_start (subr, filename, line)
6525 tree subr;
6526 const char *filename;
6527 int line;
6529 prepare_function_start ();
6531 current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
6532 cfun->decl = subr;
6534 /* Nonzero if this is a nested function that uses a static chain. */
6536 current_function_needs_context
6537 = (decl_function_context (current_function_decl) != 0
6538 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6540 /* Within function body, compute a type's size as soon as it is laid out. */
6541 immediate_size_expand++;
6543 /* Prevent ever trying to delete the first instruction of a function.
6544 Also tell final how to output a linenum before the function prologue.
6545 Note linenums could be missing, e.g. when compiling a Java .class file. */
6546 if (line > 0)
6547 emit_line_note (filename, line);
6549 /* Make sure first insn is a note even if we don't want linenums.
6550 This makes sure the first insn will never be deleted.
6551 Also, final expects a note to appear there. */
6552 emit_note (NULL, NOTE_INSN_DELETED);
6554 /* Set flags used by final.c. */
6555 if (aggregate_value_p (DECL_RESULT (subr)))
6557 #ifdef PCC_STATIC_STRUCT_RETURN
6558 current_function_returns_pcc_struct = 1;
6559 #endif
6560 current_function_returns_struct = 1;
6563 /* Warn if this value is an aggregate type,
6564 regardless of which calling convention we are using for it. */
6565 if (warn_aggregate_return
6566 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6567 warning ("function returns an aggregate");
6569 current_function_returns_pointer
6570 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6573 /* Make sure all values used by the optimization passes have sane
6574 defaults. */
6575 void
6576 init_function_for_compilation ()
6578 reg_renumber = 0;
6580 /* No prologue/epilogue insns yet. */
6581 VARRAY_GROW (prologue, 0);
6582 VARRAY_GROW (epilogue, 0);
6583 VARRAY_GROW (sibcall_epilogue, 0);
6586 /* Expand a call to __main at the beginning of a possible main function. */
6588 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6589 #undef HAS_INIT_SECTION
6590 #define HAS_INIT_SECTION
6591 #endif
6593 void
6594 expand_main_function ()
6596 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6597 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6599 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6600 rtx tmp, seq;
6602 start_sequence ();
6603 /* Forcibly align the stack. */
6604 #ifdef STACK_GROWS_DOWNWARD
6605 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
6606 stack_pointer_rtx, 1, OPTAB_WIDEN);
6607 #else
6608 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6609 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6610 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6611 stack_pointer_rtx, 1, OPTAB_WIDEN);
6612 #endif
6613 if (tmp != stack_pointer_rtx)
6614 emit_move_insn (stack_pointer_rtx, tmp);
6616 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6617 tmp = force_reg (Pmode, const0_rtx);
6618 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6619 seq = get_insns ();
6620 end_sequence ();
6622 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6623 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6624 break;
6625 if (tmp)
6626 emit_insn_before (seq, tmp);
6627 else
6628 emit_insn (seq);
6630 #endif
6632 #ifndef HAS_INIT_SECTION
6633 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
6634 #endif
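/* Editorial sketch (not part of function.c): the alignment arithmetic
   expand_main_function emits, on plain integers.  ALIGN is a made-up
   stand-in for PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT.  A stack that
   grows downward is aligned with a single mask; one that grows upward
   must round up first so the adjustment cannot expose live data.
   Guarded out of compilation.  */
#if 0
#include <assert.h>

#define ALIGN 16	/* hypothetical stack boundary, in bytes */

int
main (void)
{
  long sp_down = 1000;	/* STACK_GROWS_DOWNWARD: mask only */
  long sp_up = 1000;	/* upward growth: round up, then mask */

  sp_down &= -ALIGN;
  sp_up = (sp_up + ALIGN - 1) & -ALIGN;

  assert (sp_down == 992 && sp_up == 1008);
  return 0;
}
#endif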
6637 /* The PENDING_SIZES represent the sizes of variable-sized types.
6638 Create RTL for the various sizes now (using temporary variables),
6639 so that we can refer to the sizes from the RTL we are generating
6640 for the current function. The PENDING_SIZES are a TREE_LIST. The
6641 TREE_VALUE of each node is a SAVE_EXPR. */
6643 void
6644 expand_pending_sizes (pending_sizes)
6645 tree pending_sizes;
6647 tree tem;
6649 /* Evaluate now the sizes of any types declared among the arguments. */
6650 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6652 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6653 /* Flush the queue in case this parameter declaration has
6654 side-effects. */
6655 emit_queue ();
6659 /* Start the RTL for a new function, and set variables used for
6660 emitting RTL.
6661 SUBR is the FUNCTION_DECL node.
6662 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6663 the function's parameters, which must be run at any return statement. */
6665 void
6666 expand_function_start (subr, parms_have_cleanups)
6667 tree subr;
6668 int parms_have_cleanups;
6670 tree tem;
6671 rtx last_ptr = NULL_RTX;
6673 /* Make sure volatile mem refs aren't considered
6674 valid operands of arithmetic insns. */
6675 init_recog_no_volatile ();
6677 current_function_instrument_entry_exit
6678 = (flag_instrument_function_entry_exit
6679 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6681 current_function_profile
6682 = (profile_flag
6683 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6685 current_function_limit_stack
6686 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6688 /* If the function gets a static chain arg, store it in the stack frame.
6689 Do this first, so it gets the first stack slot offset. */
6690 if (current_function_needs_context)
6692 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6694 /* Delay copying static chain if it is not a register to avoid
6695 conflicts with regs used for parameters. */
6696 if (! SMALL_REGISTER_CLASSES
6697 || GET_CODE (static_chain_incoming_rtx) == REG)
6698 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6701 /* If the parameters of this function need cleaning up, get a label
6702 for the beginning of the code which executes those cleanups. This must
6703 be done before doing anything with return_label. */
6704 if (parms_have_cleanups)
6705 cleanup_label = gen_label_rtx ();
6706 else
6707 cleanup_label = 0;
6709 /* Make the label for return statements to jump to. Do not special
6710 case machines with special return instructions -- they will be
6711 handled later during jump, ifcvt, or epilogue creation. */
6712 return_label = gen_label_rtx ();
6714 /* Initialize rtx used to return the value. */
6715 /* Do this before assign_parms so that we copy the struct value address
6716 before any library calls that assign parms might generate. */
6718 /* Decide whether to return the value in memory or in a register. */
6719 if (aggregate_value_p (DECL_RESULT (subr)))
6721 /* Returning something that won't go in a register. */
6722 rtx value_address = 0;
6724 #ifdef PCC_STATIC_STRUCT_RETURN
6725 if (current_function_returns_pcc_struct)
6727 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6728 value_address = assemble_static_space (size);
6730 else
6731 #endif
6733 /* Expect to be passed the address of a place to store the value.
6734 If it is passed as an argument, assign_parms will take care of
6735 it. */
6736 if (struct_value_incoming_rtx)
6738 value_address = gen_reg_rtx (Pmode);
6739 emit_move_insn (value_address, struct_value_incoming_rtx);
6742 if (value_address)
6744 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6745 set_mem_attributes (x, DECL_RESULT (subr), 1);
6746 SET_DECL_RTL (DECL_RESULT (subr), x);
6749 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6750 /* If return mode is void, this decl rtl should not be used. */
6751 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6752 else
6754 /* Compute the return value into a pseudo reg, which we will copy
6755 into the true return register after the cleanups are done. */
6757 /* In order to figure out what mode to use for the pseudo, we
6758 figure out what the mode of the eventual return register will
6759 actually be, and use that. */
6760 rtx hard_reg
6761 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6762 subr, 1);
6764 /* Structures that are returned in registers are not aggregate_value_p,
6765 so we may see a PARALLEL or a REG. */
6766 if (REG_P (hard_reg))
6767 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6768 else if (GET_CODE (hard_reg) == PARALLEL)
6769 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
6770 else
6771 abort ();
6773 /* Set DECL_REGISTER flag so that expand_function_end will copy the
6774 result to the real return register(s). */
6775 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6778 /* Initialize rtx for parameters and local variables.
6779 In some cases this requires emitting insns. */
6781 assign_parms (subr);
6783 /* Copy the static chain now if it wasn't a register. The delay is to
6784 avoid conflicts with the parameter passing registers. */
6786 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6787 if (GET_CODE (static_chain_incoming_rtx) != REG)
6788 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6790 /* The following was moved from init_function_start.
6791 The move is supposed to make sdb output more accurate. */
6792 /* Indicate the beginning of the function body,
6793 as opposed to parm setup. */
6794 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6796 if (GET_CODE (get_last_insn ()) != NOTE)
6797 emit_note (NULL, NOTE_INSN_DELETED);
6798 parm_birth_insn = get_last_insn ();
6800 context_display = 0;
6801 if (current_function_needs_context)
6803 /* Fetch static chain values for containing functions. */
6804 tem = decl_function_context (current_function_decl);
6805 /* Copy the static chain pointer into a pseudo. If we have
6806 small register classes, copy the value from memory if
6807 static_chain_incoming_rtx is a REG. */
6808 if (tem)
6810 /* If the static chain originally came in a register, put it back
6811 there, then move it out in the next insn. The reason for
6812 this peculiar code is to satisfy function integration. */
6813 if (SMALL_REGISTER_CLASSES
6814 && GET_CODE (static_chain_incoming_rtx) == REG)
6815 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6816 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6819 while (tem)
6821 tree rtlexp = make_node (RTL_EXPR);
6823 RTL_EXPR_RTL (rtlexp) = last_ptr;
6824 context_display = tree_cons (tem, rtlexp, context_display);
6825 tem = decl_function_context (tem);
6826 if (tem == 0)
6827 break;
6828 /* Chain thru stack frames, assuming pointer to next lexical frame
6829 is found at the place we always store it. */
6830 #ifdef FRAME_GROWS_DOWNWARD
6831 last_ptr = plus_constant (last_ptr,
6832 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6833 #endif
6834 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6835 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6836 last_ptr = copy_to_reg (last_ptr);
6838 /* If we are not optimizing, ensure that we know that this
6839 piece of context is live over the entire function. */
6840 if (! optimize)
6841 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6842 save_expr_regs);
6846 if (current_function_instrument_entry_exit)
6848 rtx fun = DECL_RTL (current_function_decl);
6849 if (GET_CODE (fun) == MEM)
6850 fun = XEXP (fun, 0);
6851 else
6852 abort ();
6853 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6854 2, fun, Pmode,
6855 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6857 hard_frame_pointer_rtx),
6858 Pmode);
6861 if (current_function_profile)
6863 #ifdef PROFILE_HOOK
6864 PROFILE_HOOK (current_function_funcdef_no);
6865 #endif
6868 /* The tail-recursion label, if we end up needing one, should go
6869 after the display initializations. Ensure we have a NOTE here
6870 since some things (like trampolines) get placed before this. */
6871 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6873 /* Evaluate now the sizes of any types declared among the arguments. */
6874 expand_pending_sizes (nreverse (get_pending_sizes ()));
6876 /* Make sure there is a line number after the function entry setup code. */
6877 force_next_line_note ();
6880 /* Undo the effects of init_dummy_function_start. */
6881 void
6882 expand_dummy_function_end ()
6884 /* End any sequences that failed to be closed due to syntax errors. */
6885 while (in_sequence_p ())
6886 end_sequence ();
6888 /* Outside function body, can't compute type's actual size
6889 until next function's body starts. */
6891 free_after_parsing (cfun);
6892 free_after_compilation (cfun);
6893 cfun = 0;
6896 /* Call DOIT for each hard register used as a return value from
6897 the current function. */
6899 void
6900 diddle_return_value (doit, arg)
6901 void (*doit) PARAMS ((rtx, void *));
6902 void *arg;
6904 rtx outgoing = current_function_return_rtx;
6906 if (! outgoing)
6907 return;
6909 if (GET_CODE (outgoing) == REG)
6910 (*doit) (outgoing, arg);
6911 else if (GET_CODE (outgoing) == PARALLEL)
6913 int i;
6915 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6917 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6919 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6920 (*doit) (x, arg);
6925 static void
6926 do_clobber_return_reg (reg, arg)
6927 rtx reg;
6928 void *arg ATTRIBUTE_UNUSED;
6930 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6933 void
6934 clobber_return_register ()
6936 diddle_return_value (do_clobber_return_reg, NULL);
6938 /* In case we do use a pseudo to return the value, clobber it too. */
6939 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6941 tree decl_result = DECL_RESULT (current_function_decl);
6942 rtx decl_rtl = DECL_RTL (decl_result);
6943 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6945 do_clobber_return_reg (decl_rtl, NULL);
6950 static void
6951 do_use_return_reg (reg, arg)
6952 rtx reg;
6953 void *arg ATTRIBUTE_UNUSED;
6955 emit_insn (gen_rtx_USE (VOIDmode, reg));
6958 void
6959 use_return_register ()
6961 diddle_return_value (do_use_return_reg, NULL);
6964 static GTY(()) rtx initial_trampoline;
6966 /* Generate RTL for the end of the current function.
6967 FILENAME and LINE are the current position in the source file.
6969 It is up to language-specific callers to do cleanups for parameters--
6970 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6972 void
6973 expand_function_end (filename, line, end_bindings)
6974 const char *filename;
6975 int line;
6976 int end_bindings;
6978 tree link;
6979 rtx clobber_after;
6981 finish_expr_for_function ();
6983 /* If arg_pointer_save_area was referenced only from a nested
6984 function, we will not have initialized it yet. Do that now. */
6985 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6986 get_arg_pointer_save_area (cfun);
6988 #ifdef NON_SAVING_SETJMP
6989 /* Don't put any variables in registers if we call setjmp
6990 on a machine that fails to restore the registers. */
6991 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6993 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6994 setjmp_protect (DECL_INITIAL (current_function_decl));
6996 setjmp_protect_args ();
6998 #endif
7000 /* Initialize any trampolines required by this function. */
7001 for (link = trampoline_list; link; link = TREE_CHAIN (link))
7003 tree function = TREE_PURPOSE (link);
7004 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
7005 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
7006 #ifdef TRAMPOLINE_TEMPLATE
7007 rtx blktramp;
7008 #endif
7009 rtx seq;
7011 #ifdef TRAMPOLINE_TEMPLATE
7012 /* First make sure this compilation has a template for
7013 initializing trampolines. */
7014 if (initial_trampoline == 0)
7016 initial_trampoline
7017 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
7018 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
7020 #endif
7022 /* Generate insns to initialize the trampoline. */
7023 start_sequence ();
7024 tramp = round_trampoline_addr (XEXP (tramp, 0));
7025 #ifdef TRAMPOLINE_TEMPLATE
7026 blktramp = replace_equiv_address (initial_trampoline, tramp);
7027 emit_block_move (blktramp, initial_trampoline,
7028 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
7029 #endif
7030 trampolines_created = 1;
7031 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
7032 seq = get_insns ();
7033 end_sequence ();
7035 /* Put those insns at entry to the containing function (this one). */
7036 emit_insn_before (seq, tail_recursion_reentry);
7039 /* If we are doing stack checking and this function makes calls,
7040 do a stack probe at the start of the function to ensure we have enough
7041 space for another stack frame. */
7042 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
7044 rtx insn, seq;
7046 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7047 if (GET_CODE (insn) == CALL_INSN)
7049 start_sequence ();
7050 probe_stack_range (STACK_CHECK_PROTECT,
7051 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
7052 seq = get_insns ();
7053 end_sequence ();
7054 emit_insn_before (seq, tail_recursion_reentry);
7055 break;
7059 /* Possibly warn about unused parameters. */
7060 if (warn_unused_parameter)
7062 tree decl;
7064 for (decl = DECL_ARGUMENTS (current_function_decl);
7065 decl; decl = TREE_CHAIN (decl))
7066 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
7067 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
7068 warning_with_decl (decl, "unused parameter `%s'");
7071 /* Delete handlers for nonlocal gotos if nothing uses them. */
7072 if (nonlocal_goto_handler_slots != 0
7073 && ! current_function_has_nonlocal_label)
7074 delete_handlers ();
7076 /* End any sequences that failed to be closed due to syntax errors. */
7077 while (in_sequence_p ())
7078 end_sequence ();
7080 /* Outside function body, can't compute type's actual size
7081 until next function's body starts. */
7082 immediate_size_expand--;
7084 clear_pending_stack_adjust ();
7085 do_pending_stack_adjust ();
7087 /* Mark the end of the function body.
7088 If control reaches this insn, the function can drop through
7089 without returning a value. */
7090 emit_note (NULL, NOTE_INSN_FUNCTION_END);
7092 /* Must mark the last line number note in the function, so that the test
7093 coverage code can avoid counting the last line twice. This just tells
7094 the code to ignore the immediately following line note, since there
7095 already exists a copy of this note somewhere above. This line number
7096 note is still needed for debugging though, so we can't delete it. */
7097 if (flag_test_coverage)
7098 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
7100 /* Output a linenumber for the end of the function.
7101 SDB depends on this. */
7102 emit_line_note_force (filename, line);
7104 /* Before the return label (if any), clobber the return
7105 registers so that they are not propagated live to the rest of
7106 the function. This can only happen with functions that drop
7107 through; if there had been a return statement, there would
7108 have either been a return rtx, or a jump to the return label.
7110 We delay the actual code generation until after the current_function_value_rtx
7111 is computed. */
7112 clobber_after = get_last_insn ();
7114 /* Output the label for the actual return from the function,
7115 if one is expected. This happens either because a function epilogue
7116 is used instead of a return instruction, or because a return was done
7117 with a goto in order to run local cleanups, or because of pcc-style
7118 structure returning. */
7119 if (return_label)
7120 emit_label (return_label);
7122 /* C++ uses this. */
7123 if (end_bindings)
7124 expand_end_bindings (0, 0, 0);
7126 if (current_function_instrument_entry_exit)
7128 rtx fun = DECL_RTL (current_function_decl);
7129 if (GET_CODE (fun) == MEM)
7130 fun = XEXP (fun, 0);
7131 else
7132 abort ();
7133 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
7134 2, fun, Pmode,
7135 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
7137 hard_frame_pointer_rtx),
7138 Pmode);
7141 /* Let except.c know where it should emit the call to unregister
7142 the function context for sjlj exceptions. */
7143 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
7144 sjlj_emit_function_exit_after (get_last_insn ());
7146 /* If we had calls to alloca, and this machine needs
7147 an accurate stack pointer to exit the function,
7148 insert some code to save and restore the stack pointer. */
7149 #ifdef EXIT_IGNORE_STACK
7150 if (! EXIT_IGNORE_STACK)
7151 #endif
7152 if (current_function_calls_alloca)
7154 rtx tem = 0;
7156 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
7157 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
7160 /* If scalar return value was computed in a pseudo-reg, or was a named
7161 return value that got dumped to the stack, copy that to the hard
7162 return register. */
7163 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
7165 tree decl_result = DECL_RESULT (current_function_decl);
7166 rtx decl_rtl = DECL_RTL (decl_result);
7168 if (REG_P (decl_rtl)
7169 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
7170 : DECL_REGISTER (decl_result))
7172 rtx real_decl_rtl = current_function_return_rtx;
7174 /* This should be set in assign_parms. */
7175 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
7176 abort ();
7178 /* If this is a BLKmode structure being returned in registers,
7179 then use the mode computed in expand_return. Note that if
7180 decl_rtl is memory, then its mode may have been changed,
7181 but that current_function_return_rtx has not. */
7182 if (GET_MODE (real_decl_rtl) == BLKmode)
7183 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
7185 /* If a named return value dumped decl_result to memory, then
7186 we may need to re-do the PROMOTE_MODE signed/unsigned
7187 extension. */
7188 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
7190 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
7192 #ifdef PROMOTE_FUNCTION_RETURN
7193 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
7194 &unsignedp, 1);
7195 #endif
7197 convert_move (real_decl_rtl, decl_rtl, unsignedp);
7199 else if (GET_CODE (real_decl_rtl) == PARALLEL)
7201 /* If expand_function_start has created a PARALLEL for decl_rtl,
7202 move the result to the real return registers. Otherwise, do
7203 a group load from decl_rtl for a named return. */
7204 if (GET_CODE (decl_rtl) == PARALLEL)
7205 emit_group_move (real_decl_rtl, decl_rtl);
7206 else
7207 emit_group_load (real_decl_rtl, decl_rtl,
7208 int_size_in_bytes (TREE_TYPE (decl_result)));
7210 else
7211 emit_move_insn (real_decl_rtl, decl_rtl);
7215 /* If returning a structure, arrange to return the address of the value
7216 in a place where debuggers expect to find it.
7218 If returning a structure PCC style,
7219 the caller also depends on this value.
7220 And current_function_returns_pcc_struct is not necessarily set. */
7221 if (current_function_returns_struct
7222 || current_function_returns_pcc_struct)
7224 rtx value_address
7225 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7226 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7227 #ifdef FUNCTION_OUTGOING_VALUE
7228 rtx outgoing
7229 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7230 current_function_decl);
7231 #else
7232 rtx outgoing
7233 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7234 #endif
7236 /* Mark this as a function return value so integrate will delete the
7237 assignment and USE below when inlining this function. */
7238 REG_FUNCTION_VALUE_P (outgoing) = 1;
7240 #ifdef POINTERS_EXTEND_UNSIGNED
7241 /* The address may be ptr_mode and OUTGOING may be Pmode. */
7242 if (GET_MODE (outgoing) != GET_MODE (value_address))
7243 value_address = convert_memory_address (GET_MODE (outgoing),
7244 value_address);
7245 #endif
7247 emit_move_insn (outgoing, value_address);
7249 /* Show return register used to hold result (in this case the address
7250 of the result). */
7251 current_function_return_rtx = outgoing;
7254 /* If this is an implementation of throw, do what's necessary to
7255 communicate between __builtin_eh_return and the epilogue. */
7256 expand_eh_return ();
7258 /* Emit the actual code to clobber return register. */
7260 rtx seq, after;
7262 start_sequence ();
7263 clobber_return_register ();
7264 seq = get_insns ();
7265 end_sequence ();
7267 after = emit_insn_after (seq, clobber_after);
7269 if (clobber_after != after)
7270 cfun->x_clobber_return_insn = after;
7273 /* ??? This should no longer be necessary since stupid is no longer with
7274 us, but there are some parts of the compiler (eg reload_combine, and
7275 sh mach_dep_reorg) that still try and compute their own lifetime info
7276 instead of using the general framework. */
7277 use_return_register ();
7279 /* Fix up any gotos that jumped out to the outermost
7280 binding level of the function.
7281 Must follow emitting RETURN_LABEL. */
7283 /* If you have any cleanups to do at this point,
7284 and they need to create temporary variables,
7285 then you will lose. */
7286 expand_fixups (get_insns ());
7289 rtx
7290 get_arg_pointer_save_area (f)
7291 struct function *f;
7293 rtx ret = f->x_arg_pointer_save_area;
7295 if (! ret)
7297 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7298 f->x_arg_pointer_save_area = ret;
7301 if (f == cfun && ! f->arg_pointer_save_area_init)
7303 rtx seq;
7305 /* Save the arg pointer at the beginning of the function. The
7306 generated stack slot may not be a valid memory address, so we
7307 have to check it and fix it if necessary. */
7308 start_sequence ();
7309 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7310 seq = get_insns ();
7311 end_sequence ();
7313 push_topmost_sequence ();
7314 emit_insn_after (seq, get_insns ());
7315 pop_topmost_sequence ();
7318 return ret;
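/* Usage sketch (hypothetical caller):

       rtx save = get_arg_pointer_save_area (cfun);

   besides returning the stack slot, while the save area is still
   uninitialized such a call also queues the initializing store at the
   very start of the insn chain via push_topmost_sequence.  */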
7321 /* Extend a vector that records the INSN_UIDs of INSNS
7322 (a list of one or more insns). */
static void
record_insns (insns, vecp)
     rtx insns;
     varray_type *vecp;
{
  int i, len;
  rtx tmp;

  tmp = insns;
  len = 0;
  while (tmp != NULL_RTX)
    {
      len++;
      tmp = NEXT_INSN (tmp);
    }

  i = VARRAY_SIZE (*vecp);
  VARRAY_GROW (*vecp, i + len);
  tmp = insns;
  while (tmp != NULL_RTX)
    {
      VARRAY_INT (*vecp, i) = INSN_UID (tmp);
      i++;
      tmp = NEXT_INSN (tmp);
    }
}
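/* For illustration (a sketch, not part of the original flow): the
   prologue/epilogue threading code below uses this pair roughly as

       record_insns (seq, &prologue);     -- remember the insn UIDs
       ...
       if (contains (insn, prologue))     -- is INSN one of them?

   so that later passes can recognize prologue and epilogue insns by
   UID alone.  */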
7351 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
7352 be running after reorg, SEQUENCE rtl is possible. */
static int
contains (insn, vec)
     rtx insn;
     varray_type vec;
{
  int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
	    count++;
      return count;
    }
  else
    {
      for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	if (INSN_UID (insn) == VARRAY_INT (vec, j))
	  return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (contains (insn, prologue))
    return 1;
  if (contains (insn, epilogue))
    return 1;
  return 0;
}
int
sibcall_epilogue_contains (insn)
     rtx insn;
{
  if (sibcall_epilogue)
    return contains (insn, sibcall_epilogue);
  return 0;
}
7400 #ifdef HAVE_return
7401 /* Insert gen_return at the end of block BB. This also means updating
7402 block_for_insn appropriately. */
static void
emit_return_into_block (bb, line_note)
     basic_block bb;
     rtx line_note;
{
  emit_jump_insn_after (gen_return (), bb->end);
  if (line_note)
    emit_line_note_after (NOTE_SOURCE_FILE (line_note),
			  NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
}
#endif /* HAVE_return */
7416 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7418 /* These functions convert the epilogue into a variant that does not modify the
7419 stack pointer. This is used in cases where a function returns an object
7420 whose size is not known until it is computed. The called function leaves the
7421 object on the stack, leaves the stack depressed, and returns a pointer to
7422 the object.
7424 What we need to do is track all modifications and references to the stack
7425 pointer, deleting the modifications and changing the references to point to
7426 the location the stack pointer would have pointed to had the modifications
7427 taken place.
7429 These functions need to be portable so we need to make as few assumptions
7430 about the epilogue as we can. However, the epilogue basically contains
7431 three things: instructions to reset the stack pointer, instructions to
7432 reload registers, possibly including the frame pointer, and an
7433 instruction to return to the caller.
7435 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7436 We also make no attempt to validate the insns we make since if they are
7437 invalid, we probably can't do anything valid. The intent is that these
7438 routines get "smarter" as more and more machines start to use them and
7439 they try operating on different epilogues.
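   As a purely hypothetical illustration (not from any particular
   target): given an epilogue of the form

       (set (reg fp) (mem (plus (reg sp) (const_int 8))))
       (set (reg sp) (plus (reg sp) (const_int 16)))
       (return)

   the SP adjustment would be dropped, later references to SP rewritten
   in terms of (plus (reg sp) (const_int 16)), and, if the return
   address lives on the stack, the RETURN turned into an indirect jump
   through the saved return address.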
7441 We use the following structure to track what the part of the epilogue that
7442 we've already processed has done. We keep two copies of the SP equivalence,
7443 one for use during the insn we are processing and one for use in the next
7444 insn. The difference is because one part of a PARALLEL may adjust SP
7445 and the other may use it. */
struct epi_info
{
  rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
  HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
  rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
  HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
  rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
				   should be set to once we no longer need
				   its value.  */
};
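/* The two copies matter for a single insn such as (hypothetical)

       (parallel [(set (reg sp) (plus (reg sp) (const_int 4)))
		  (set (reg r0) (mem (reg sp)))])

   where the MEM must be interpreted with the old SP equivalence while
   the first SET establishes the new one; only after the whole insn has
   been processed do the new_* fields become current.  */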
7458 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7459 static void emit_equiv_load PARAMS ((struct epi_info *));
/* Modify INSNS, a list of one or more insns that is part of the epilogue,
   so that it makes no modifications to the stack pointer.  Return the new
   list of insns.  */
7464 static rtx
7465 keep_stack_depressed (insns)
7466 rtx insns;
7468 int j;
7469 struct epi_info info;
7470 rtx insn, next;
  /* If the epilogue is just a single instruction, it must be OK as is.  */
7474 if (NEXT_INSN (insns) == NULL_RTX)
7475 return insns;
7477 /* Otherwise, start a sequence, initialize the information we have, and
7478 process all the insns we were given. */
7479 start_sequence ();
7481 info.sp_equiv_reg = stack_pointer_rtx;
7482 info.sp_offset = 0;
7483 info.equiv_reg_src = 0;
7485 insn = insns;
7486 next = NULL_RTX;
7487 while (insn != NULL_RTX)
7489 next = NEXT_INSN (insn);
7491 if (!INSN_P (insn))
7493 add_insn (insn);
7494 insn = next;
7495 continue;
7498 /* If this insn references the register that SP is equivalent to and
7499 we have a pending load to that register, we must force out the load
7500 first and then indicate we no longer know what SP's equivalent is. */
7501 if (info.equiv_reg_src != 0
7502 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7504 emit_equiv_load (&info);
7505 info.sp_equiv_reg = 0;
7508 info.new_sp_equiv_reg = info.sp_equiv_reg;
7509 info.new_sp_offset = info.sp_offset;
7511 /* If this is a (RETURN) and the return address is on the stack,
7512 update the address and change to an indirect jump. */
7513 if (GET_CODE (PATTERN (insn)) == RETURN
7514 || (GET_CODE (PATTERN (insn)) == PARALLEL
7515 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7517 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7518 rtx base = 0;
7519 HOST_WIDE_INT offset = 0;
7520 rtx jump_insn, jump_set;
7522 /* If the return address is in a register, we can emit the insn
7523 unchanged. Otherwise, it must be a MEM and we see what the
7524 base register and offset are. In any case, we have to emit any
7525 pending load to the equivalent reg of SP, if any. */
7526 if (GET_CODE (retaddr) == REG)
7528 emit_equiv_load (&info);
7529 add_insn (insn);
7530 insn = next;
7531 continue;
7533 else if (GET_CODE (retaddr) == MEM
7534 && GET_CODE (XEXP (retaddr, 0)) == REG)
7535 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7536 else if (GET_CODE (retaddr) == MEM
7537 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7538 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7539 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7541 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7542 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7544 else
7545 abort ();
7547 /* If the base of the location containing the return pointer
7548 is SP, we must update it with the replacement address. Otherwise,
7549 just build the necessary MEM. */
7550 retaddr = plus_constant (base, offset);
7551 if (base == stack_pointer_rtx)
7552 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7553 plus_constant (info.sp_equiv_reg,
7554 info.sp_offset));
7556 retaddr = gen_rtx_MEM (Pmode, retaddr);
7558 /* If there is a pending load to the equivalent register for SP
7559 and we reference that register, we must load our address into
7560 a scratch register and then do that load. */
7561 if (info.equiv_reg_src
7562 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7564 unsigned int regno;
7565 rtx reg;
7567 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7568 if (HARD_REGNO_MODE_OK (regno, Pmode)
7569 && !fixed_regs[regno]
7570 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7571 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7572 regno)
7573 && !refers_to_regno_p (regno,
7574 regno + HARD_REGNO_NREGS (regno,
7575 Pmode),
7576 info.equiv_reg_src, NULL))
7577 break;
7579 if (regno == FIRST_PSEUDO_REGISTER)
7580 abort ();
7582 reg = gen_rtx_REG (Pmode, regno);
7583 emit_move_insn (reg, retaddr);
7584 retaddr = reg;
7587 emit_equiv_load (&info);
7588 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
	  /* Show that the SET in the above insn is a RETURN.  */
7591 jump_set = single_set (jump_insn);
7592 if (jump_set == 0)
7593 abort ();
7594 else
7595 SET_IS_RETURN_P (jump_set) = 1;
7598 /* If SP is not mentioned in the pattern and its equivalent register, if
7599 any, is not modified, just emit it. Otherwise, if neither is set,
7600 replace the reference to SP and emit the insn. If none of those are
7601 true, handle each SET individually. */
7602 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7603 && (info.sp_equiv_reg == stack_pointer_rtx
7604 || !reg_set_p (info.sp_equiv_reg, insn)))
7605 add_insn (insn);
7606 else if (! reg_set_p (stack_pointer_rtx, insn)
7607 && (info.sp_equiv_reg == stack_pointer_rtx
7608 || !reg_set_p (info.sp_equiv_reg, insn)))
7610 if (! validate_replace_rtx (stack_pointer_rtx,
7611 plus_constant (info.sp_equiv_reg,
7612 info.sp_offset),
7613 insn))
7614 abort ();
7616 add_insn (insn);
7618 else if (GET_CODE (PATTERN (insn)) == SET)
7619 handle_epilogue_set (PATTERN (insn), &info);
7620 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7622 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7623 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7624 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7626 else
7627 add_insn (insn);
7629 info.sp_equiv_reg = info.new_sp_equiv_reg;
7630 info.sp_offset = info.new_sp_offset;
7632 insn = next;
7635 insns = get_insns ();
7636 end_sequence ();
7637 return insns;
/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (set, p)
     rtx set;
     struct epi_info *p;
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
	abort ();

      if (GET_CODE (SET_SRC (set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}

      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
	abort ();

      return;
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
	  || p->equiv_reg_src != 0)
	abort ();
      else
	p->equiv_reg_src
	  = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				  plus_constant (p->sp_equiv_reg,
						 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn with its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}
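/* For instance (hypothetical): given (set (reg sp) (plus (reg fp)
   (const_int 16))), the first branch above records new_sp_equiv_reg == fp
   and new_sp_offset == 16; a later (set (reg r0) (mem (reg sp))) in the
   same epilogue is then emitted as
   (set (reg r0) (mem (plus (reg fp) (const_int 16)))).  */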
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (p)
     struct epi_info *p;
{
  if (p->equiv_reg_src != 0)
    emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);

  p->equiv_reg_src = 0;
}
7722 #endif
7724 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7725 this into place with notes indicating where the prologue ends and where
7726 the epilogue begins. Update the basic block information when possible. */
7728 void
7729 thread_prologue_and_epilogue_insns (f)
7730 rtx f ATTRIBUTE_UNUSED;
7732 int inserted = 0;
7733 edge e;
7734 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7735 rtx seq;
7736 #endif
7737 #ifdef HAVE_prologue
7738 rtx prologue_end = NULL_RTX;
7739 #endif
7740 #if defined (HAVE_epilogue) || defined(HAVE_return)
7741 rtx epilogue_end = NULL_RTX;
7742 #endif
7744 #ifdef HAVE_prologue
7745 if (HAVE_prologue)
7747 start_sequence ();
7748 seq = gen_prologue ();
7749 emit_insn (seq);
7751 /* Retain a map of the prologue insns. */
7752 record_insns (seq, &prologue);
7753 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7755 seq = get_insns ();
7756 end_sequence ();
7758 /* Can't deal with multiple successors of the entry block
7759 at the moment. Function should always have at least one
7760 entry point. */
7761 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7762 abort ();
7764 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7765 inserted = 1;
7767 #endif
7769 /* If the exit block has no non-fake predecessors, we don't need
7770 an epilogue. */
7771 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7772 if ((e->flags & EDGE_FAKE) == 0)
7773 break;
7774 if (e == NULL)
7775 goto epilogue_done;
7777 #ifdef HAVE_return
7778 if (optimize && HAVE_return)
7780 /* If we're allowed to generate a simple return instruction,
7781 then by definition we don't need a full epilogue. Examine
7782 the block that falls through to EXIT. If it does not
7783 contain any code, examine its predecessors and try to
7784 emit (conditional) return instructions. */
7786 basic_block last;
7787 edge e_next;
7788 rtx label;
7790 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7791 if (e->flags & EDGE_FALLTHRU)
7792 break;
7793 if (e == NULL)
7794 goto epilogue_done;
7795 last = e->src;
7797 /* Verify that there are no active instructions in the last block. */
7798 label = last->end;
7799 while (label && GET_CODE (label) != CODE_LABEL)
7801 if (active_insn_p (label))
7802 break;
7803 label = PREV_INSN (label);
7806 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7808 rtx epilogue_line_note = NULL_RTX;
7810 /* Locate the line number associated with the closing brace,
7811 if we can find one. */
7812 for (seq = get_last_insn ();
7813 seq && ! active_insn_p (seq);
7814 seq = PREV_INSN (seq))
7815 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7817 epilogue_line_note = seq;
7818 break;
7821 for (e = last->pred; e; e = e_next)
7823 basic_block bb = e->src;
7824 rtx jump;
7826 e_next = e->pred_next;
7827 if (bb == ENTRY_BLOCK_PTR)
7828 continue;
7830 jump = bb->end;
7831 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7832 continue;
7834 /* If we have an unconditional jump, we can replace that
7835 with a simple return instruction. */
7836 if (simplejump_p (jump))
7838 emit_return_into_block (bb, epilogue_line_note);
7839 delete_insn (jump);
7842 /* If we have a conditional jump, we can try to replace
7843 that with a conditional return instruction. */
7844 else if (condjump_p (jump))
7846 if (! redirect_jump (jump, 0, 0))
7847 continue;
7849 /* If this block has only one successor, it both jumps
7850 and falls through to the fallthru block, so we can't
7851 delete the edge. */
7852 if (bb->succ->succ_next == NULL)
7853 continue;
7855 else
7856 continue;
7858 /* Fix up the CFG for the successful change we just made. */
7859 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7862 /* Emit a return insn for the exit fallthru block. Whether
7863 this is still reachable will be determined later. */
7865 emit_barrier_after (last->end);
7866 emit_return_into_block (last, epilogue_line_note);
7867 epilogue_end = last->end;
7868 last->succ->flags &= ~EDGE_FALLTHRU;
7869 goto epilogue_done;
7872 #endif
7873 #ifdef HAVE_epilogue
7874 if (HAVE_epilogue)
7876 /* Find the edge that falls through to EXIT. Other edges may exist
7877 due to RETURN instructions, but those don't need epilogues.
7878 There really shouldn't be a mixture -- either all should have
7879 been converted or none, however... */
7881 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7882 if (e->flags & EDGE_FALLTHRU)
7883 break;
7884 if (e == NULL)
7885 goto epilogue_done;
7887 start_sequence ();
7888 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7890 seq = gen_epilogue ();
7892 #ifdef INCOMING_RETURN_ADDR_RTX
7893 /* If this function returns with the stack depressed and we can support
7894 it, massage the epilogue to actually do that. */
7895 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7896 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7897 seq = keep_stack_depressed (seq);
7898 #endif
7900 emit_jump_insn (seq);
7902 /* Retain a map of the epilogue insns. */
7903 record_insns (seq, &epilogue);
7905 seq = get_insns ();
7906 end_sequence ();
7908 insert_insn_on_edge (seq, e);
7909 inserted = 1;
7911 #endif
7912 epilogue_done:
7914 if (inserted)
7915 commit_edge_insertions ();
7917 #ifdef HAVE_sibcall_epilogue
7918 /* Emit sibling epilogues before any sibling call sites. */
7919 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7921 basic_block bb = e->src;
7922 rtx insn = bb->end;
7923 rtx i;
7924 rtx newinsn;
7926 if (GET_CODE (insn) != CALL_INSN
7927 || ! SIBLING_CALL_P (insn))
7928 continue;
7930 start_sequence ();
7931 emit_insn (gen_sibcall_epilogue ());
7932 seq = get_insns ();
7933 end_sequence ();
7935 /* Retain a map of the epilogue insns. Used in life analysis to
7936 avoid getting rid of sibcall epilogue insns. Do this before we
7937 actually emit the sequence. */
7938 record_insns (seq, &sibcall_epilogue);
7940 i = PREV_INSN (insn);
7941 newinsn = emit_insn_before (seq, insn);
7943 #endif
7945 #ifdef HAVE_prologue
7946 if (prologue_end)
7948 rtx insn, prev;
7950 /* GDB handles `break f' by setting a breakpoint on the first
7951 line note after the prologue. Which means (1) that if
7952 there are line number notes before where we inserted the
7953 prologue we should move them, and (2) we should generate a
7954 note before the end of the first basic block, if there isn't
7955 one already there.
	 ??? This behavior is completely broken when dealing with
	 multiple entry functions.  We simply always place the note into
	 the first basic block and let alternate entry points be missed.  */
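      /* Concretely (an illustration, not from this file): for

	     int f (void) { int x = 1; return x; }

	 `break f' stops at the line of `x = 1', i.e. at the first line
	 note following NOTE_INSN_PROLOGUE_END, which is why the line
	 notes are shuffled below.  */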
7963 for (insn = prologue_end; insn; insn = prev)
7965 prev = PREV_INSN (insn);
7966 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7968 /* Note that we cannot reorder the first insn in the
7969 chain, since rest_of_compilation relies on that
7970 remaining constant. */
7971 if (prev == NULL)
7972 break;
7973 reorder_insns (insn, insn, prologue_end);
7977 /* Find the last line number note in the first block. */
7978 for (insn = ENTRY_BLOCK_PTR->next_bb->end;
7979 insn != prologue_end && insn;
7980 insn = PREV_INSN (insn))
7981 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7982 break;
7984 /* If we didn't find one, make a copy of the first line number
7985 we run across. */
7986 if (! insn)
7988 for (insn = next_active_insn (prologue_end);
7989 insn;
7990 insn = PREV_INSN (insn))
7991 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7993 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7994 NOTE_LINE_NUMBER (insn),
7995 prologue_end);
7996 break;
8000 #endif
8001 #ifdef HAVE_epilogue
8002 if (epilogue_end)
8004 rtx insn, next;
8006 /* Similarly, move any line notes that appear after the epilogue.
8007 There is no need, however, to be quite so anal about the existence
8008 of such a note. */
8009 for (insn = epilogue_end; insn; insn = next)
8011 next = NEXT_INSN (insn);
8012 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
8013 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
8016 #endif
8019 /* Reposition the prologue-end and epilogue-begin notes after instruction
8020 scheduling and delayed branch scheduling. */
8022 void
8023 reposition_prologue_and_epilogue_notes (f)
8024 rtx f ATTRIBUTE_UNUSED;
8026 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
8027 rtx insn, last, note;
8028 int len;
8030 if ((len = VARRAY_SIZE (prologue)) > 0)
8032 last = 0, note = 0;
8034 /* Scan from the beginning until we reach the last prologue insn.
8035 We apparently can't depend on basic_block_{head,end} after
8036 reorg has run. */
8037 for (insn = f; insn; insn = NEXT_INSN (insn))
8039 if (GET_CODE (insn) == NOTE)
8041 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
8042 note = insn;
8044 else if (contains (insn, prologue))
8046 last = insn;
8047 if (--len == 0)
8048 break;
8052 if (last)
8054 /* Find the prologue-end note if we haven't already, and
8055 move it to just after the last prologue insn. */
8056 if (note == 0)
8058 for (note = last; (note = NEXT_INSN (note));)
8059 if (GET_CODE (note) == NOTE
8060 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
8061 break;
8064 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
8065 if (GET_CODE (last) == CODE_LABEL)
8066 last = NEXT_INSN (last);
8067 reorder_insns (note, note, last);
8071 if ((len = VARRAY_SIZE (epilogue)) > 0)
8073 last = 0, note = 0;
8075 /* Scan from the end until we reach the first epilogue insn.
8076 We apparently can't depend on basic_block_{head,end} after
8077 reorg has run. */
8078 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
8080 if (GET_CODE (insn) == NOTE)
8082 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
8083 note = insn;
8085 else if (contains (insn, epilogue))
8087 last = insn;
8088 if (--len == 0)
8089 break;
8093 if (last)
8095 /* Find the epilogue-begin note if we haven't already, and
8096 move it to just before the first epilogue insn. */
8097 if (note == 0)
8099 for (note = insn; (note = PREV_INSN (note));)
8100 if (GET_CODE (note) == NOTE
8101 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
8102 break;
8105 if (PREV_INSN (last) != note)
8106 reorder_insns (note, note, PREV_INSN (last));
8109 #endif /* HAVE_prologue or HAVE_epilogue */
8112 /* Called once, at initialization, to initialize function.c. */
void
init_function_once ()
{
  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}
8122 #include "gt-function.h"