/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
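
/* Illustrative examples (not part of the original source): with ALIGN == 8,
   FLOOR_ROUND (13, 8) == (13 & ~7) == 8 and CEIL_ROUND (13, 8)
   == ((13 + 7) & ~7) == 16.  Both leave an already-aligned value unchanged,
   and both remain correct for negative VALUEs, which is why the masking
   trick is used instead of division.  */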
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Nonzero if at least one trampoline has been created.  */
int trampolines_created;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) PARAMS ((void));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
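
/* An illustrative sketch of the level discipline described above (not part
   of the original source): push_temp_slots () raises temp_slot_level from,
   say, 1 to 2; a slot allocated now gets level 2 and is freed by the next
   free_temp_slots () or pop_temp_slots () at that level.  Preserving a
   slot simply decrements its level to 1, so it survives until the
   enclosing level is popped.  */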
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
					enum machine_mode, enum machine_mode,
					int, unsigned int, int,
					htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
					     enum machine_mode,
					     htab_t));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
				    htab_t));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
					  int, int, rtx));
static void fixup_var_refs_insns_with_hash
				PARAMS ((htab_t, rtx,
					 enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
					 int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
				      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
					     int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
			       tree));
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int, int, htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static hashval_t insns_for_mem_hash PARAMS ((const void *));
static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
static void instantiate_virtual_regs_lossage PARAMS ((rtx));
static tree split_complex_args (tree);
static void set_insn_locators (rtx, int);
/* Pointer to chain of `struct function' for containing functions.  */
static GTY(()) struct function *outer_function_chain;

/* List of insns that were postponed by purge_addressof_1.  */
static rtx postponed_insns;
/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
	cfun->contains_functions = 1;
      else
	{
	  struct function *containing = find_function_data (context);
	  containing->contains_functions = 1;
	}
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  (*lang_hooks.function.enter_nested) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  (*lang_hooks.function.leave_nested) (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
		    p->fixup_var_refs_queue->promoted_mode,
		    p->fixup_var_refs_queue->unsignedp,
		    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
	list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
	fixup_var_refs (queue->modified, queue->promoted_mode,
			queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  (*lang_hooks.function.final) (f);
  f->stmt = NULL;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->computed_goto_common_label = NULL;
  f->computed_goto_common_reg = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */
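
/* For example (illustrative only): assign_stack_local (SImode, 4, 0) below
   allocates a 4-byte slot aligned according to SImode, while
   assign_stack_local (BLKmode, 30, -1) first rounds the size up to a
   multiple of BIGGEST_ALIGNMENT and aligns the slot to that boundary.  */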
static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;
      else
	alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = (*lang_hooks.types.type_for_mode) (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
	 division with a negative dividend isn't as well defined as we might
	 like.  So we instead assume that ALIGNMENT is a power of two and
	 use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
	= (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
	   + frame_phase);
#else
      function->x_frame_offset
	= (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
	   + frame_phase);
#endif
    }
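
  /* A worked example of the rounding above (illustrative only): with
     FRAME_GROWS_DOWNWARD, STARTING_FRAME_OFFSET == 0 (hence frame_phase
     == 0) and alignment == 8, a frame offset of -20 becomes
     FLOOR_ROUND (-20, 8) == -24, so the new slot stays 8-byte aligned.  */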
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (function->x_frame_offset + bigend_correction,
			   Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1;  assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
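
/* For example (illustrative only): assign_stack_temp_for_type (DImode, 8,
   1, type) returns a slot that free_temp_slots will leave alone until the
   current nesting level is popped, whereas KEEP == 2 ties the slot's
   lifetime to target_temp_slot_level (CLEANUP_POINT_EXPRs) instead.  */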
rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& objects_must_conflict_p (p->type, type)
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->type = best_p->type;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
				 && TYPE_READONLY (type));
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so its name should be used in error messages.  In either case, we
   allocate storage of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */
rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
	size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error_with_decl (decl, "size of variable `%s' is too large");
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
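
/* Example of the combination below (illustrative only): two free BLKmode
   slots with p->base_offset == 0, p->full_size == 16 and q->base_offset
   == 16 satisfy p->base_offset + p->full_size == q->base_offset, so Q is
   merged into P, which then covers both byte ranges.  */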
void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
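
/* Example (illustrative only): an address of the form
   (plus virtual_stack_vars_rtx (const_int 12)) matches any slot whose
   base_offset <= 12 < base_offset + full_size, even if that exact rtx was
   never recorded as the slot's address.  */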
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and they share a register in common, try a recursive call on
     the remaining operands.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (GET_CODE (new) == REG)
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
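
/* For instance (illustrative only): in ({ struct s x = f (); x; }) the
   value of the last statement may live in a temporary of the current
   level; preserve_temp_slots lowers that slot's level by one so that the
   enclosing statement can still read it after free_temp_slots runs.  */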
void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
	/* If this slot is below the current TEMP_SLOT_LEVEL, then it
	   needs to be preserved.  This can happen if a temporary in
	   the RTL_EXPR was addressed; preserve_temp_slots will move
	   the temporary into a higher level.  */
	if (temp_slot_level <= p->level)
	  p->in_use = 0;
	else
	  p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack
   slot.  This is done when an address-reference to the variable is
   seen.  If RESCAN is true, all previously emitted instructions are
   examined and modified to handle the fact that DECL is now
   addressable.  */

void
put_var_into_stack (decl, rescan)
     tree decl;
     int rescan;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
	       || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
	 ? SAVE_EXPR_RTL (decl)
	 : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl, rescan);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
			    decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.
	 We fix up references to the parts only after we fix up references
	 to the whole CONCAT, lest we do double fixups for the latter
	 references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
	 already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
	SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
	SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp && rescan)
	{
	  schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
				   promoted_mode, 0);
	  schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
	  schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
	}
    }
  else
    return;
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     htab_t ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
			   AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     htab_t ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
	= (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     htab_t ht;
     rtx may_share;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
	abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
				      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
			stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
			    stack->next != 0, may_share);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
				may_share);
	  end_sequence ();
	}
    }
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx may_share;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
	 pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
	 the three sequences they (potentially) contain, and process
	 them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  int i;

	  /* Look at the Normal call, sibling call and tail recursion
	     sequences attached to the CALL_PLACEHOLDER.  */
	  for (i = 0; i < 3; i++)
	    {
	      rtx seq = XEXP (PATTERN (insn), i);
	      if (seq)
		{
		  push_to_sequence (seq);
		  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
					may_share);
		  XEXP (PATTERN (insn), i) = get_insns ();
		  end_sequence ();
		}
	    }
	}

      else if (INSN_P (insn))
	fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
			     may_share);

      insn = next;
    }
}
/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
     htab_t ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
{
  struct insns_for_mem_entry tmp;
  struct insns_for_mem_entry *ime;
  rtx insn_list;

  tmp.key = var;
  ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
			   unsignedp, 1, may_share);
}
/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */

static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx no_share;
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
	  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
	      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
		  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
	/* The REG_LIBCALL note will go away since we are going to
	   turn INSN into a NOTE, so just delete the
	   corresponding REG_RETVAL note.  */
	remove_note (XEXP (note, 0),
		     find_reg_note (XEXP (note, 0), REG_RETVAL,
				    NULL_RTX));

      delete_insn (insn);
    }

  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
	   && (set = single_set (insn)) != 0
	   && SET_DEST (set) == var
	   /* If this represents the result of an insn group,
	      don't delete the insn.  */
	   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
	   && (rtx_equal_p (SET_SRC (set), var)
	       || (GET_CODE (SET_SRC (set)) == REG
		   && (prev = prev_nonnote_insn (insn)) != 0
		   && (prev_set = single_set (prev)) != 0
		   && SET_DEST (prev_set) == SET_SRC (set)
		   && rtx_equal_p (SET_SRC (prev_set), var))))
    {
      delete_insn (insn);
    }
  else
    {
      struct fixup_replacement *replacements = 0;
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
	{
	  /* If the insn that copies the results of a CALL_INSN
	     into a pseudo now references VAR, we have to use an
	     intermediate pseudo since we want the life of the
	     return value register to be only a single insn.

	     If we don't use an intermediate pseudo, such things as
	     address computations to make the address of VAR valid
	     if it is not can be placed between the CALL_INSN and INSN.

	     To make sure this doesn't happen, we record the destination
	     of the CALL_INSN and see if the next insn uses both that
	     and VAR.  */

	  if (call_dest != 0 && GET_CODE (insn) == INSN
	      && reg_mentioned_p (var, PATTERN (insn))
	      && reg_mentioned_p (call_dest, PATTERN (insn)))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

	      emit_insn_before (gen_move_insn (temp, call_dest), insn);

	      PATTERN (insn) = replace_rtx (PATTERN (insn),
					    call_dest, temp);
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == SET)
	    call_dest = SET_DEST (PATTERN (insn));
	  else if (GET_CODE (insn) == CALL_INSN
		   && GET_CODE (PATTERN (insn)) == PARALLEL
		   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	  else
	    call_dest = 0;
	}

      /* See if we have to do anything to INSN now that VAR is in
	 memory.  If it needs to be loaded into a pseudo, use a single
	 pseudo for the entire insn in case there is a MATCH_DUP
	 between two operands.  We pass a pointer to the head of
	 a list of struct fixup_replacements.  If fixup_var_refs_1
	 needs to allocate pseudos or replacement MEMs (for SUBREGs),
	 it will record them in this list.

	 If it allocated a pseudo for any replacement, we copy into
	 it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
			&replacements, no_share);

      /* If this is last_parm_insn, and any instructions were output
	 after it to fix it up, then we must set last_parm_insn to
	 the last such instruction emitted.  */
      if (insn == last_parm_insn)
	last_parm_insn = PREV_INSN (next_insn);

      while (replacements)
	{
	  struct fixup_replacement *next;

	  if (GET_CODE (replacements->new) == REG)
	    {
	      rtx insert_before;
	      rtx seq;

	      /* OLD might be a (subreg (mem)).  */
	      if (GET_CODE (replacements->old) == SUBREG)
		replacements->old
		  = fixup_memory_subreg (replacements->old, insn,
					 promoted_mode, 0);
	      else
		replacements->old
		  = fixup_stack_1 (replacements->old, insn);

	      insert_before = insn;

	      /* If we are changing the mode, do a conversion.
		 This might be wasteful, but combine.c will
		 eliminate much of the waste.  */

	      if (GET_MODE (replacements->new)
		  != GET_MODE (replacements->old))
		{
		  start_sequence ();
		  convert_move (replacements->new,
				replacements->old, unsignedp);
		  seq = get_insns ();
		  end_sequence ();
		}
	      else
		seq = gen_move_insn (replacements->new,
				     replacements->old);

	      emit_insn_before (seq, insert_before);
	    }

	  next = replacements->next;
	  free (replacements);
	  replacements = next;
	}
    }

  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
     But don't touch other insns referred to by reg-notes;
     we will get them elsewhere.  */
  while (note)
    {
      if (GET_CODE (note) != INSN_LIST)
	XEXP (note, 0)
	  = walk_fixup_memory_subreg (XEXP (note, 0), insn,
				      promoted_mode, 1);
      note = XEXP (note, 1);
    }
}
1889 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1890 See if the rtx expression at *LOC in INSN needs to be changed.
1892 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1893 contain a list of original rtx's and replacements. If we find that we need
1894 to modify this insn by replacing a memory reference with a pseudo or by
1895 making a new MEM to implement a SUBREG, we consult that list to see if
1896 we have already chosen a replacement. If none has already been allocated,
1897 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1898 or the SUBREG, as appropriate, to the pseudo. */
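/* Illustrative shape of a list entry (field names as used below):

       { old = (mem:SI <stack slot>), new = (reg:SI 134), next = ... }

   so a second occurrence of the same MEM elsewhere in the insn is
   mapped to the same pseudo.  */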
1900 static void
1901 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1902 rtx var;
1903 enum machine_mode promoted_mode;
1904 rtx *loc;
1905 rtx insn;
1906 struct fixup_replacement **replacements;
1907 rtx no_share;
1909 int i;
1910 rtx x = *loc;
1911 RTX_CODE code = GET_CODE (x);
1912 const char *fmt;
1913 rtx tem, tem1;
1914 struct fixup_replacement *replacement;
1916 switch (code)
1918 case ADDRESSOF:
1919 if (XEXP (x, 0) == var)
1921 /* Prevent sharing of rtl that might lose. */
1922 rtx sub = copy_rtx (XEXP (var, 0));
1924 if (! validate_change (insn, loc, sub, 0))
1926 rtx y = gen_reg_rtx (GET_MODE (sub));
1927 rtx seq, new_insn;
1929 /* We should be able to replace with a register or all is lost.
1930 Note that we can't use validate_change to verify this, since
1931 we are not trying to replace all dups simultaneously. */
1932 if (! validate_replace_rtx (*loc, y, insn))
1933 abort ();
1935 /* Careful! First try to recognize a direct move of the
1936 value, mimicking how things are done in gen_reload wrt
1937 PLUS. Consider what happens when insn is a conditional
1938 move instruction and addsi3 clobbers flags. */
1940 start_sequence ();
1941 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1942 seq = get_insns ();
1943 end_sequence ();
1945 if (recog_memoized (new_insn) < 0)
1947 /* That failed. Fall back on force_operand and hope. */
1949 start_sequence ();
1950 sub = force_operand (sub, y);
1951 if (sub != y)
1952 emit_insn (gen_move_insn (y, sub));
1953 seq = get_insns ();
1954 end_sequence ();
1957 #ifdef HAVE_cc0
1958 /* Don't separate setter from user. */
1959 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1960 insn = PREV_INSN (insn);
1961 #endif
1963 emit_insn_before (seq, insn);
1966 return;
1968 case MEM:
1969 if (var == x)
1971 /* If we already have a replacement, use it. Otherwise,
1972 try to fix up this address in case it is invalid. */
1974 replacement = find_fixup_replacement (replacements, var);
1975 if (replacement->new)
1977 *loc = replacement->new;
1978 return;
1981 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1983 /* Unless we are forcing memory to register or we changed the mode,
1984 we can leave things the way they are if the insn is valid. */
1986 INSN_CODE (insn) = -1;
1987 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1988 && recog_memoized (insn) >= 0)
1989 return;
1991 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1992 return;
1995 /* If X contains VAR, we need to unshare it here so that we update
1996 each occurrence separately. But all identical MEMs in one insn
1997 must be replaced with the same rtx because of the possibility of
1998 MATCH_DUPs. */
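/* For instance (illustrative), in an insn such as

       (set (mem:SI A) (plus:SI (mem:SI A) (const_int 1)))

   whose pattern uses a matching constraint, both occurrences of
   (mem:SI A) must map to the one replacement rtx, or a MATCH_DUP in
   the insn pattern would no longer match.  */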
2000 if (reg_mentioned_p (var, x))
2002 replacement = find_fixup_replacement (replacements, x);
2003 if (replacement->new == 0)
2004 replacement->new = copy_most_rtx (x, no_share);
2006 *loc = x = replacement->new;
2007 code = GET_CODE (x);
2009 break;
2011 case REG:
2012 case CC0:
2013 case PC:
2014 case CONST_INT:
2015 case CONST:
2016 case SYMBOL_REF:
2017 case LABEL_REF:
2018 case CONST_DOUBLE:
2019 case CONST_VECTOR:
2020 return;
2022 case SIGN_EXTRACT:
2023 case ZERO_EXTRACT:
2024 /* Note that in some cases those types of expressions are altered
2025 by optimize_bit_field, and do not survive to get here. */
2026 if (XEXP (x, 0) == var
2027 || (GET_CODE (XEXP (x, 0)) == SUBREG
2028 && SUBREG_REG (XEXP (x, 0)) == var))
2030 /* Get TEM as a valid MEM in the mode presently in the insn.
2032 We don't worry about the possibility of MATCH_DUP here; it
2033 is highly unlikely and would be tricky to handle. */
2035 tem = XEXP (x, 0);
2036 if (GET_CODE (tem) == SUBREG)
2038 if (GET_MODE_BITSIZE (GET_MODE (tem))
2039 > GET_MODE_BITSIZE (GET_MODE (var)))
2041 replacement = find_fixup_replacement (replacements, var);
2042 if (replacement->new == 0)
2043 replacement->new = gen_reg_rtx (GET_MODE (var));
2044 SUBREG_REG (tem) = replacement->new;
2046 /* The following code works only if we have a MEM, so we
2047 need to handle the subreg here. We directly substitute
2048 it assuming that a subreg must be OK here. We already
2049 scheduled a replacement to copy the mem into the
2050 subreg. */
2051 XEXP (x, 0) = tem;
2052 return;
2054 else
2055 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2057 else
2058 tem = fixup_stack_1 (tem, insn);
2060 /* Unless we want to load from memory, get TEM into the proper mode
2061 for an extract from memory. This can only be done if the
2062 extract is at a constant position and length. */
2064 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2065 && GET_CODE (XEXP (x, 2)) == CONST_INT
2066 && ! mode_dependent_address_p (XEXP (tem, 0))
2067 && ! MEM_VOLATILE_P (tem))
2069 enum machine_mode wanted_mode = VOIDmode;
2070 enum machine_mode is_mode = GET_MODE (tem);
2071 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2073 if (GET_CODE (x) == ZERO_EXTRACT)
2075 enum machine_mode new_mode
2076 = mode_for_extraction (EP_extzv, 1);
2077 if (new_mode != MAX_MACHINE_MODE)
2078 wanted_mode = new_mode;
2080 else if (GET_CODE (x) == SIGN_EXTRACT)
2082 enum machine_mode new_mode
2083 = mode_for_extraction (EP_extv, 1);
2084 if (new_mode != MAX_MACHINE_MODE)
2085 wanted_mode = new_mode;
2088 /* If we have a narrower mode, we can do something. */
2089 if (wanted_mode != VOIDmode
2090 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2092 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2093 rtx old_pos = XEXP (x, 2);
2094 rtx newmem;
2096 /* If the bytes and bits are counted differently, we
2097 must adjust the offset. */
2098 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2099 offset = (GET_MODE_SIZE (is_mode)
2100 - GET_MODE_SIZE (wanted_mode) - offset);
2102 pos %= GET_MODE_BITSIZE (wanted_mode);
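/* Worked example (illustrative; assumes 8-bit units): extracting at
   bit position 48 from a DImode TEM with HImode WANTED_MODE gives
   offset = 48 / 8 = 6; when bytes and bits are counted differently
   the flip above yields 8 - 2 - 6 = 0; and pos %= 16 leaves bit
   position 0 within the narrower MEM.  */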
2104 newmem = adjust_address_nv (tem, wanted_mode, offset);
2106 /* Make the change and see if the insn remains valid. */
2107 INSN_CODE (insn) = -1;
2108 XEXP (x, 0) = newmem;
2109 XEXP (x, 2) = GEN_INT (pos);
2111 if (recog_memoized (insn) >= 0)
2112 return;
2114 /* Otherwise, restore the old position. XEXP (x, 0) is
2115 replaced below. */
2116 XEXP (x, 2) = old_pos;
2120 /* If we get here, the bitfield extract insn can't accept a memory
2121 reference. Copy the input into a register. */
2123 tem1 = gen_reg_rtx (GET_MODE (tem));
2124 emit_insn_before (gen_move_insn (tem1, tem), insn);
2125 XEXP (x, 0) = tem1;
2126 return;
2128 break;
2130 case SUBREG:
2131 if (SUBREG_REG (x) == var)
2133 /* If this is a special SUBREG made because VAR was promoted
2134 from a wider mode, replace it with VAR and call ourselves
2135 recursively, this time saying that the object previously
2136 had its current mode (by virtue of the SUBREG). */
2138 if (SUBREG_PROMOTED_VAR_P (x))
2140 *loc = var;
2141 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2142 no_share);
2143 return;
2146 /* If this SUBREG makes VAR wider, it has become a paradoxical
2147 SUBREG with VAR in memory, but these aren't allowed at this
2148 stage of the compilation. So load VAR into a pseudo and take
2149 a SUBREG of that pseudo. */
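/* E.g. (illustrative; PROMOTED_MODE assumed to be SImode):
   (subreg:DI (mem:SI <slot>) 0) becomes (subreg:DI (reg:SI 134) 0),
   and the copy from the slot into register 134 is emitted when the
   replacement list is processed.  */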
2150 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2152 replacement = find_fixup_replacement (replacements, var);
2153 if (replacement->new == 0)
2154 replacement->new = gen_reg_rtx (promoted_mode);
2155 SUBREG_REG (x) = replacement->new;
2156 return;
2159 /* See if we have already found a replacement for this SUBREG.
2160 If so, use it. Otherwise, make a MEM and see if the insn
2161 is recognized. If not, or if we should force MEM into a register,
2162 make a pseudo for this SUBREG. */
2163 replacement = find_fixup_replacement (replacements, x);
2164 if (replacement->new)
2166 *loc = replacement->new;
2167 return;
2170 replacement->new = *loc = fixup_memory_subreg (x, insn,
2171 promoted_mode, 0);
2173 INSN_CODE (insn) = -1;
2174 if (! flag_force_mem && recog_memoized (insn) >= 0)
2175 return;
2177 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2178 return;
2180 break;
2182 case SET:
2183 /* First do special simplification of bit-field references. */
2184 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2185 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2186 optimize_bit_field (x, insn, 0);
2187 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2188 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2189 optimize_bit_field (x, insn, 0);
2191 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2192 into a register and then store it back out. */
2193 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2194 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2195 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2196 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2197 > GET_MODE_SIZE (GET_MODE (var))))
2199 replacement = find_fixup_replacement (replacements, var);
2200 if (replacement->new == 0)
2201 replacement->new = gen_reg_rtx (GET_MODE (var));
2203 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2204 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2207 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2208 insn into a pseudo and store the low part of the pseudo into VAR. */
2209 if (GET_CODE (SET_DEST (x)) == SUBREG
2210 && SUBREG_REG (SET_DEST (x)) == var
2211 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2212 > GET_MODE_SIZE (GET_MODE (var))))
2214 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2215 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2216 tem)),
2217 insn);
2218 break;
2222 rtx dest = SET_DEST (x);
2223 rtx src = SET_SRC (x);
2224 rtx outerdest = dest;
2226 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2227 || GET_CODE (dest) == SIGN_EXTRACT
2228 || GET_CODE (dest) == ZERO_EXTRACT)
2229 dest = XEXP (dest, 0);
2231 if (GET_CODE (src) == SUBREG)
2232 src = SUBREG_REG (src);
2234 /* If VAR does not appear at the top level of the SET
2235 just scan the lower levels of the tree. */
2237 if (src != var && dest != var)
2238 break;
2240 /* We will need to rerecognize this insn. */
2241 INSN_CODE (insn) = -1;
2243 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2244 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2246 /* Since this case will return, ensure we fix up all the
2247 operands here. */
2248 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2249 insn, replacements, no_share);
2250 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2251 insn, replacements, no_share);
2252 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2253 insn, replacements, no_share);
2255 tem = XEXP (outerdest, 0);
2257 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2258 that may appear inside a ZERO_EXTRACT.
2259 This was legitimate when the MEM was a REG. */
2260 if (GET_CODE (tem) == SUBREG
2261 && SUBREG_REG (tem) == var)
2262 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2263 else
2264 tem = fixup_stack_1 (tem, insn);
2266 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2267 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2268 && ! mode_dependent_address_p (XEXP (tem, 0))
2269 && ! MEM_VOLATILE_P (tem))
2271 enum machine_mode wanted_mode;
2272 enum machine_mode is_mode = GET_MODE (tem);
2273 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2275 wanted_mode = mode_for_extraction (EP_insv, 0);
2277 /* If we have a narrower mode, we can do something. */
2278 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2280 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2281 rtx old_pos = XEXP (outerdest, 2);
2282 rtx newmem;
2284 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2285 offset = (GET_MODE_SIZE (is_mode)
2286 - GET_MODE_SIZE (wanted_mode) - offset);
2288 pos %= GET_MODE_BITSIZE (wanted_mode);
2290 newmem = adjust_address_nv (tem, wanted_mode, offset);
2292 /* Make the change and see if the insn remains valid. */
2293 INSN_CODE (insn) = -1;
2294 XEXP (outerdest, 0) = newmem;
2295 XEXP (outerdest, 2) = GEN_INT (pos);
2297 if (recog_memoized (insn) >= 0)
2298 return;
2300 /* Otherwise, restore the old position. XEXP (outerdest, 0) is
2301 replaced below. */
2302 XEXP (outerdest, 2) = old_pos;
2306 /* If we get here, the bit-field store doesn't allow memory
2307 or isn't located at a constant position. Load the value into
2308 a register, do the store, and put it back into memory. */
2310 tem1 = gen_reg_rtx (GET_MODE (tem));
2311 emit_insn_before (gen_move_insn (tem1, tem), insn);
2312 emit_insn_after (gen_move_insn (tem, tem1), insn);
2313 XEXP (outerdest, 0) = tem1;
2314 return;
2317 /* STRICT_LOW_PART is a no-op on memory references
2318 and it can cause combinations to be unrecognizable,
2319 so eliminate it. */
2321 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2322 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2324 /* A valid insn to copy VAR into or out of a register
2325 must be left alone, to avoid an infinite loop here.
2326 If the reference to VAR is by a subreg, fix that up,
2327 since SUBREG is not valid for a memref.
2328 Also fix up the address of the stack slot.
2330 Note that we must not try to recognize the insn until
2331 after we know that we have valid addresses and no
2332 (subreg (mem ...) ...) constructs, since these interfere
2333 with determining the validity of the insn. */
2335 if ((SET_SRC (x) == var
2336 || (GET_CODE (SET_SRC (x)) == SUBREG
2337 && SUBREG_REG (SET_SRC (x)) == var))
2338 && (GET_CODE (SET_DEST (x)) == REG
2339 || (GET_CODE (SET_DEST (x)) == SUBREG
2340 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2341 && GET_MODE (var) == promoted_mode
2342 && x == single_set (insn))
2344 rtx pat, last;
2346 if (GET_CODE (SET_SRC (x)) == SUBREG
2347 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2348 > GET_MODE_SIZE (GET_MODE (var))))
2350 /* This (subreg VAR) is now a paradoxical subreg. We need
2351 to replace VAR instead of the subreg. */
2352 replacement = find_fixup_replacement (replacements, var);
2353 if (replacement->new == NULL_RTX)
2354 replacement->new = gen_reg_rtx (GET_MODE (var));
2355 SUBREG_REG (SET_SRC (x)) = replacement->new;
2357 else
2359 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2360 if (replacement->new)
2361 SET_SRC (x) = replacement->new;
2362 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2363 SET_SRC (x) = replacement->new
2364 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2366 else
2367 SET_SRC (x) = replacement->new
2368 = fixup_stack_1 (SET_SRC (x), insn);
2371 if (recog_memoized (insn) >= 0)
2372 return;
2374 /* INSN is not valid, but we know that we want to
2375 copy SET_SRC (x) to SET_DEST (x) in some way. So
2376 we generate the move and see whether it requires more
2377 than one insn. If it does, we emit those insns and
2378 delete INSN. Otherwise, we can just replace the pattern
2379 of INSN; we have already verified above that INSN has
2380 no other function than to do X. */
2382 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2383 if (NEXT_INSN (pat) != NULL_RTX)
2385 last = emit_insn_before (pat, insn);
2387 /* INSN might have REG_RETVAL or other important notes, so
2388 we need to store the pattern of the last insn in the
2389 sequence into INSN similarly to the normal case. LAST
2390 should not have REG_NOTES, but we allow them if INSN has
2391 no REG_NOTES. */
2392 if (REG_NOTES (last) && REG_NOTES (insn))
2393 abort ();
2394 if (REG_NOTES (last))
2395 REG_NOTES (insn) = REG_NOTES (last);
2396 PATTERN (insn) = PATTERN (last);
2398 delete_insn (last);
2400 else
2401 PATTERN (insn) = PATTERN (pat);
2403 return;
2406 if ((SET_DEST (x) == var
2407 || (GET_CODE (SET_DEST (x)) == SUBREG
2408 && SUBREG_REG (SET_DEST (x)) == var))
2409 && (GET_CODE (SET_SRC (x)) == REG
2410 || (GET_CODE (SET_SRC (x)) == SUBREG
2411 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2412 && GET_MODE (var) == promoted_mode
2413 && x == single_set (insn))
2415 rtx pat, last;
2417 if (GET_CODE (SET_DEST (x)) == SUBREG)
2418 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2419 promoted_mode, 0);
2420 else
2421 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2423 if (recog_memoized (insn) >= 0)
2424 return;
2426 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2427 if (NEXT_INSN (pat) != NULL_RTX)
2429 last = emit_insn_before (pat, insn);
2431 /* INSN might have REG_RETVAL or other important notes, so
2432 we need to store the pattern of the last insn in the
2433 sequence into INSN similarly to the normal case. LAST
2434 should not have REG_NOTES, but we allow them if INSN has
2435 no REG_NOTES. */
2436 if (REG_NOTES (last) && REG_NOTES (insn))
2437 abort ();
2438 if (REG_NOTES (last))
2439 REG_NOTES (insn) = REG_NOTES (last);
2440 PATTERN (insn) = PATTERN (last);
2442 delete_insn (last);
2444 else
2445 PATTERN (insn) = PATTERN (pat);
2447 return;
2450 /* Otherwise, storing into VAR must be handled specially
2451 by storing into a temporary and copying that into VAR
2452 with a new insn after this one. Note that this case
2453 will be used when storing into a promoted scalar since
2454 the insn will now have different modes on the input
2455 and output and hence will be invalid (except for the case
2456 of setting it to a constant, which does not need any
2457 change if it is valid). We generate extra code in that case,
2458 but combine.c will eliminate it. */
2460 if (dest == var)
2462 rtx temp;
2463 rtx fixeddest = SET_DEST (x);
2464 enum machine_mode temp_mode;
2466 /* A STRICT_LOW_PART around a MEM can be discarded. */
2467 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2468 fixeddest = XEXP (fixeddest, 0);
2469 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2470 if (GET_CODE (fixeddest) == SUBREG)
2472 fixeddest = fixup_memory_subreg (fixeddest, insn,
2473 promoted_mode, 0);
2474 temp_mode = GET_MODE (fixeddest);
2476 else
2478 fixeddest = fixup_stack_1 (fixeddest, insn);
2479 temp_mode = promoted_mode;
2482 temp = gen_reg_rtx (temp_mode);
2484 emit_insn_after (gen_move_insn (fixeddest,
2485 gen_lowpart (GET_MODE (fixeddest),
2486 temp)),
2487 insn);
2489 SET_DEST (x) = temp;
2493 default:
2494 break;
2497 /* Nothing special about this RTX; fix its operands. */
2499 fmt = GET_RTX_FORMAT (code);
2500 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2502 if (fmt[i] == 'e')
2503 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2504 no_share);
2505 else if (fmt[i] == 'E')
2507 int j;
2508 for (j = 0; j < XVECLEN (x, i); j++)
2509 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2510 insn, replacements, no_share);
2515 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2516 The REG was placed on the stack, so X now has the form (SUBREG:m1
2517 (MEM:m2 ...)).
2519 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2520 must be emitted to compute NEWADDR, put them before INSN.
2522 UNCRITICAL nonzero means accept paradoxical subregs.
2523 This is used for subregs found inside REG_NOTES. */
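/* Concrete instance (illustrative; PROMOTED_MODE taken to equal m2,
   so the big-endian correction is zero): with m1 = SImode and
   m2 = DImode, (SUBREG:SI (MEM:DI addr) 4) is rewritten to
   (MEM:SI (plus addr (const_int 4))), with any insns needed to
   compute the new address emitted before INSN.  */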
2525 static rtx
2526 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2527 rtx x;
2528 rtx insn;
2529 enum machine_mode promoted_mode;
2530 int uncritical;
2532 int offset;
2533 rtx mem = SUBREG_REG (x);
2534 rtx addr = XEXP (mem, 0);
2535 enum machine_mode mode = GET_MODE (x);
2536 rtx result, seq;
2538 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2539 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2540 abort ();
2542 offset = SUBREG_BYTE (x);
2543 if (BYTES_BIG_ENDIAN)
2544 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2545 the offset so that it points to the right location within the
2546 MEM. */
2547 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2549 if (!flag_force_addr
2550 && memory_address_p (mode, plus_constant (addr, offset)))
2551 /* Shortcut if no insns need be emitted. */
2552 return adjust_address (mem, mode, offset);
2554 start_sequence ();
2555 result = adjust_address (mem, mode, offset);
2556 seq = get_insns ();
2557 end_sequence ();
2559 emit_insn_before (seq, insn);
2560 return result;
2563 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2564 Replace subexpressions of X in place.
2565 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2566 Otherwise return X, with its contents possibly altered.
2568 INSN, PROMOTED_MODE and UNCRITICAL are as for
2569 fixup_memory_subreg. */
2571 static rtx
2572 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2573 rtx x;
2574 rtx insn;
2575 enum machine_mode promoted_mode;
2576 int uncritical;
2578 enum rtx_code code;
2579 const char *fmt;
2580 int i;
2582 if (x == 0)
2583 return 0;
2585 code = GET_CODE (x);
2587 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2588 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2590 /* Nothing special about this RTX; fix its operands. */
2592 fmt = GET_RTX_FORMAT (code);
2593 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2595 if (fmt[i] == 'e')
2596 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2597 promoted_mode, uncritical);
2598 else if (fmt[i] == 'E')
2600 int j;
2601 for (j = 0; j < XVECLEN (x, i); j++)
2602 XVECEXP (x, i, j)
2603 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2604 promoted_mode, uncritical);
2607 return x;
2610 /* For each memory ref within X, if it refers to a stack slot
2611 with an out of range displacement, put the address in a temp register
2612 (emitting new insns before INSN to load these registers)
2613 and alter the memory ref to use that register.
2614 Replace each such MEM rtx with a copy, to avoid clobberage. */
2616 static rtx
2617 fixup_stack_1 (x, insn)
2618 rtx x;
2619 rtx insn;
2621 int i;
2622 RTX_CODE code = GET_CODE (x);
2623 const char *fmt;
2625 if (code == MEM)
2627 rtx ad = XEXP (x, 0);
2628 /* If we have the address of a stack slot but it's not valid
2629 (displacement is too large), compute the sum in a register. */
2630 if (GET_CODE (ad) == PLUS
2631 && GET_CODE (XEXP (ad, 0)) == REG
2632 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2633 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2634 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2635 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2636 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2637 #endif
2638 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2639 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2640 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2641 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2643 rtx temp, seq;
2644 if (memory_address_p (GET_MODE (x), ad))
2645 return x;
2647 start_sequence ();
2648 temp = copy_to_reg (ad);
2649 seq = get_insns ();
2650 end_sequence ();
2651 emit_insn_before (seq, insn);
2652 return replace_equiv_address (x, temp);
2654 return x;
2657 fmt = GET_RTX_FORMAT (code);
2658 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2660 if (fmt[i] == 'e')
2661 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2662 else if (fmt[i] == 'E')
2664 int j;
2665 for (j = 0; j < XVECLEN (x, i); j++)
2666 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2669 return x;
2672 /* Optimization: a bit-field instruction whose field
2673 happens to be a byte or halfword in memory
2674 can be changed to a move instruction.
2676 We call here when INSN is an insn to examine or store into a bit-field.
2677 BODY is the SET-rtx to be altered.
2679 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2680 (Currently this is called only from function.c, and EQUIV_MEM
2681 is always 0.) */
2683 static void
2684 optimize_bit_field (body, insn, equiv_mem)
2685 rtx body;
2686 rtx insn;
2687 rtx *equiv_mem;
2689 rtx bitfield;
2690 int destflag;
2691 rtx seq = 0;
2692 enum machine_mode mode;
2694 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2695 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2696 bitfield = SET_DEST (body), destflag = 1;
2697 else
2698 bitfield = SET_SRC (body), destflag = 0;
2700 /* First check that the field being stored has constant size and position
2701 and is in fact a byte or halfword suitably aligned. */
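/* For instance (illustrative):

       (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 16))

   names 8 bits at bit 16 -- a whole byte at a byte boundary -- so
   the access can become a plain QImode move.  */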
2703 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2704 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2705 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2706 != BLKmode)
2707 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2709 rtx memref = 0;
2711 /* Now check that the containing word is memory, not a register,
2712 and that it is safe to change the machine mode. */
2714 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2715 memref = XEXP (bitfield, 0);
2716 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2717 && equiv_mem != 0)
2718 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2719 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2720 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2721 memref = SUBREG_REG (XEXP (bitfield, 0));
2722 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2723 && equiv_mem != 0
2724 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2725 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2727 if (memref
2728 && ! mode_dependent_address_p (XEXP (memref, 0))
2729 && ! MEM_VOLATILE_P (memref))
2731 /* Now adjust the address, first for any subreg'ing
2732 that we are now getting rid of,
2733 and then for which byte of the word is wanted. */
2735 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2736 rtx insns;
2738 /* Adjust OFFSET to count bits from low-address byte. */
2739 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2740 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2741 - offset - INTVAL (XEXP (bitfield, 1)));
2743 /* Adjust OFFSET to count bytes from low-address byte. */
2744 offset /= BITS_PER_UNIT;
2745 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2747 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2748 / UNITS_PER_WORD) * UNITS_PER_WORD;
2749 if (BYTES_BIG_ENDIAN)
2750 offset -= (MIN (UNITS_PER_WORD,
2751 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2752 - MIN (UNITS_PER_WORD,
2753 GET_MODE_SIZE (GET_MODE (memref))));
2756 start_sequence ();
2757 memref = adjust_address (memref, mode, offset);
2758 insns = get_insns ();
2759 end_sequence ();
2760 emit_insn_before (insns, insn);
2762 /* Store this memory reference where
2763 we found the bit field reference. */
2765 if (destflag)
2767 validate_change (insn, &SET_DEST (body), memref, 1);
2768 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2770 rtx src = SET_SRC (body);
2771 while (GET_CODE (src) == SUBREG
2772 && SUBREG_BYTE (src) == 0)
2773 src = SUBREG_REG (src);
2774 if (GET_MODE (src) != GET_MODE (memref))
2775 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2776 validate_change (insn, &SET_SRC (body), src, 1);
2778 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2779 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2780 /* This shouldn't happen because anything that didn't have
2781 one of these modes should have got converted explicitly
2782 and then referenced through a subreg.
2783 This is so because the original bit-field was
2784 handled by agg_mode and so its tree structure had
2785 the same mode that memref now has. */
2786 abort ();
2788 else
2790 rtx dest = SET_DEST (body);
2792 while (GET_CODE (dest) == SUBREG
2793 && SUBREG_BYTE (dest) == 0
2794 && (GET_MODE_CLASS (GET_MODE (dest))
2795 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2796 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2797 <= UNITS_PER_WORD))
2798 dest = SUBREG_REG (dest);
2800 validate_change (insn, &SET_DEST (body), dest, 1);
2802 if (GET_MODE (dest) == GET_MODE (memref))
2803 validate_change (insn, &SET_SRC (body), memref, 1);
2804 else
2806 /* Convert the mem ref to the destination mode. */
2807 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2809 start_sequence ();
2810 convert_move (newreg, memref,
2811 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2812 seq = get_insns ();
2813 end_sequence ();
2815 validate_change (insn, &SET_SRC (body), newreg, 1);
2819 /* See if we can convert this extraction or insertion into
2820 a simple move insn. We might not be able to do so if this
2821 was, for example, part of a PARALLEL.
2823 If we succeed, write out any needed conversions. If we fail,
2824 it is hard to guess why we failed, so don't do anything
2825 special; just let the optimization be suppressed. */
2827 if (apply_change_group () && seq)
2828 emit_insn_before (seq, insn);
2833 /* These routines are responsible for converting virtual register references
2834 to the actual hard register references once RTL generation is complete.
2836 The following four variables are used for communication between the
2837 routines. They contain the offsets of the virtual registers from their
2838 respective hard registers. */
2840 static int in_arg_offset;
2841 static int var_offset;
2842 static int dynamic_offset;
2843 static int out_arg_offset;
2844 static int cfa_offset;
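/* Illustrative use of these offsets: a reference such as

       (plus:SI (reg virtual-stack-vars) (const_int 8))

   is rewritten into (plus:SI (reg frame-pointer) (const_int 8 +
   var_offset)), and likewise for the other virtual registers.  */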
2846 /* In most machines, the stack pointer register is equivalent to the bottom
2847 of the stack. */
2849 #ifndef STACK_POINTER_OFFSET
2850 #define STACK_POINTER_OFFSET 0
2851 #endif
2853 /* If not defined, pick an appropriate default for the offset of dynamically
2854 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2855 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2857 #ifndef STACK_DYNAMIC_OFFSET
2859 /* The bottom of the stack points to the actual arguments. If
2860 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2861 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2862 stack space for register parameters is not pushed by the caller, but
2863 rather part of the fixed stack areas and hence not included in
2864 `current_function_outgoing_args_size'. Nevertheless, we must allow
2865 for it when allocating stack dynamic objects. */
2867 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2868 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2869 ((ACCUMULATE_OUTGOING_ARGS \
2870 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2871 + (STACK_POINTER_OFFSET))
2873 #else
2874 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2875 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2876 + (STACK_POINTER_OFFSET))
2877 #endif
2878 #endif
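/* Numeric sketch (values invented): with ACCUMULATE_OUTGOING_ARGS,
   current_function_outgoing_args_size == 16, REG_PARM_STACK_SPACE of
   24 and STACK_POINTER_OFFSET == 0, the first definition yields a
   dynamic offset of 16 + 24 + 0 == 40 bytes.  */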
2880 /* On most machines, the CFA coincides with the first incoming parm. */
2882 #ifndef ARG_POINTER_CFA_OFFSET
2883 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2884 #endif
2886 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
2887 had its address taken. DECL is the decl or SAVE_EXPR for the
2888 object stored in the register, for later use if we do need to force
2889 REG into the stack. REG is overwritten by the MEM like in
2890 put_reg_into_stack. RESCAN is true if previously emitted
2891 instructions must be rescanned and modified now that the REG has
2892 been transformed. */
2894 rtx
2895 gen_mem_addressof (reg, decl, rescan)
2896 rtx reg;
2897 tree decl;
2898 int rescan;
2900 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2901 REGNO (reg), decl);
2903 /* Calculate this before we start messing with decl's RTL. */
2904 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2906 /* If the original REG was a user-variable, then so is the REG whose
2907 address is being taken. Likewise for unchanging. */
2908 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2909 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2911 PUT_CODE (reg, MEM);
2912 MEM_ATTRS (reg) = 0;
2913 XEXP (reg, 0) = r;
2915 if (decl)
2917 tree type = TREE_TYPE (decl);
2918 enum machine_mode decl_mode
2919 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2920 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2921 : DECL_RTL_IF_SET (decl));
2923 PUT_MODE (reg, decl_mode);
2925 /* Clear DECL_RTL momentarily so functions below will work
2926 properly, then set it again. */
2927 if (DECL_P (decl) && decl_rtl == reg)
2928 SET_DECL_RTL (decl, 0);
2930 set_mem_attributes (reg, decl, 1);
2931 set_mem_alias_set (reg, set);
2933 if (DECL_P (decl) && decl_rtl == reg)
2934 SET_DECL_RTL (decl, reg);
2936 if (rescan
2937 && (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0)))
2938 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2940 else if (rescan)
2941 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2943 return reg;
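/* Illustrative result (pseudo number invented, Pmode assumed SImode):
   for (reg:SI 117) whose address is taken, REG is rewritten in place
   into

       (mem:SI (addressof:SI (reg:SI 134) 117 <decl>))

   where (reg:SI 134) is the fresh register made above and 117 is the
   original register number, kept for put_addressof_into_stack.  */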
2946 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2948 void
2949 flush_addressof (decl)
2950 tree decl;
2952 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2953 && DECL_RTL (decl) != 0
2954 && GET_CODE (DECL_RTL (decl)) == MEM
2955 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2956 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2957 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2960 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2962 static void
2963 put_addressof_into_stack (r, ht)
2964 rtx r;
2965 htab_t ht;
2967 tree decl, type;
2968 int volatile_p, used_p;
2970 rtx reg = XEXP (r, 0);
2972 if (GET_CODE (reg) != REG)
2973 abort ();
2975 decl = ADDRESSOF_DECL (r);
2976 if (decl)
2978 type = TREE_TYPE (decl);
2979 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2980 && TREE_THIS_VOLATILE (decl));
2981 used_p = (TREE_USED (decl)
2982 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2984 else
2986 type = NULL_TREE;
2987 volatile_p = 0;
2988 used_p = 1;
2991 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2992 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2995 /* List of replacements made below in purge_addressof_1 when creating
2996 bitfield insertions. */
2997 static rtx purge_bitfield_addressof_replacements;
2999 /* List of replacements made below in purge_addressof_1 for patterns
3000 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3001 corresponding (ADDRESSOF (REG ...)) and the value is a substitution
3002 for the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS
3003 is not enough in complex cases, e.g. when some field values can be
3004 extracted by using a MEM with a narrower mode. */
3005 static rtx purge_addressof_replacements;
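/* Illustrative shape of an entry (see the uses below): each element
   chains two EXPR_LISTs,

       (expr_list <key> (expr_list <replacement> <rest>))

   so XEXP (tem, 0) is the key, XEXP (XEXP (tem, 1), 0) the value and
   XEXP (XEXP (tem, 1), 1) the next entry.  */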
3007 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3008 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3009 the stack. If the function returns FALSE then the replacement could not
3010 be made. If MAY_POSTPONE is true and we would not put the ADDRESSOF
3011 on the stack, postpone processing of the insn. */
3013 static bool
3014 purge_addressof_1 (loc, insn, force, store, may_postpone, ht)
3015 rtx *loc;
3016 rtx insn;
3017 int force, store, may_postpone;
3018 htab_t ht;
3020 rtx x;
3021 RTX_CODE code;
3022 int i, j;
3023 const char *fmt;
3024 bool result = true;
3026 /* Re-start here to avoid recursion in common cases. */
3027 restart:
3029 x = *loc;
3030 if (x == 0)
3031 return true;
3033 code = GET_CODE (x);
3035 /* If we don't return in any of the cases below, we will recurse inside
3036 the RTX, which will normally result in any ADDRESSOF being forced into
3037 memory. */
3038 if (code == SET)
3040 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1,
3041 may_postpone, ht);
3042 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0,
3043 may_postpone, ht);
3044 return result;
3046 else if (code == ADDRESSOF)
3048 rtx sub, insns;
3050 if (GET_CODE (XEXP (x, 0)) != MEM)
3051 put_addressof_into_stack (x, ht);
3053 /* We must create a copy of the rtx because it was created by
3054 overwriting a REG rtx which is always shared. */
3055 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3056 if (validate_change (insn, loc, sub, 0)
3057 || validate_replace_rtx (x, sub, insn))
3058 return true;
3060 start_sequence ();
3062 /* If SUB is a hard or virtual register, try it as a pseudo-register.
3063 Otherwise, perhaps SUB is an expression, so generate code to compute
3064 it. */
3065 if (GET_CODE (sub) == REG && REGNO (sub) <= LAST_VIRTUAL_REGISTER)
3066 sub = copy_to_reg (sub);
3067 else
3068 sub = force_operand (sub, NULL_RTX);
3070 if (! validate_change (insn, loc, sub, 0)
3071 && ! validate_replace_rtx (x, sub, insn))
3072 abort ();
3074 insns = get_insns ();
3075 end_sequence ();
3076 emit_insn_before (insns, insn);
3077 return true;
3080 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3082 rtx sub = XEXP (XEXP (x, 0), 0);
3084 if (GET_CODE (sub) == MEM)
3085 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3086 else if (GET_CODE (sub) == REG
3087 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3089 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3091 int size_x, size_sub;
3093 if (may_postpone)
3095 /* Postpone for now, so that we do not emit bitfield arithmetic
3096 unless there is some benefit from it. */
3097 if (!postponed_insns || XEXP (postponed_insns, 0) != insn)
3098 postponed_insns = alloc_INSN_LIST (insn, postponed_insns);
3099 return true;
3102 if (!insn)
3104 /* When processing REG_NOTES look at the list of
3105 replacements done on the insn to find the register that X
3106 was replaced by. */
3107 rtx tem;
3109 for (tem = purge_bitfield_addressof_replacements;
3110 tem != NULL_RTX;
3111 tem = XEXP (XEXP (tem, 1), 1))
3112 if (rtx_equal_p (x, XEXP (tem, 0)))
3114 *loc = XEXP (XEXP (tem, 1), 0);
3115 return true;
3118 /* See comment for purge_addressof_replacements. */
3119 for (tem = purge_addressof_replacements;
3120 tem != NULL_RTX;
3121 tem = XEXP (XEXP (tem, 1), 1))
3122 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3124 rtx z = XEXP (XEXP (tem, 1), 0);
3126 if (GET_MODE (x) == GET_MODE (z)
3127 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3128 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3129 abort ();
3131 /* It can happen that the note may speak of things
3132 in a wider (or just different) mode than the
3133 code did. This is especially true of
3134 REG_RETVAL. */
3136 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3137 z = SUBREG_REG (z);
3139 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3140 && (GET_MODE_SIZE (GET_MODE (x))
3141 > GET_MODE_SIZE (GET_MODE (z))))
3143 /* This can occur as a result of invalid
3144 pointer casts, e.g. float f; ...
3145 *(long long int *)&f.
3146 ??? We could emit a warning here, but
3147 without a line number that wouldn't be
3148 very helpful. */
3149 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3151 else
3152 z = gen_lowpart (GET_MODE (x), z);
3154 *loc = z;
3155 return true;
3158 /* When we are processing the REG_NOTES of the last instruction
3159 of a libcall, there will be typically no replacements
3160 for that insn; the replacements happened before, piecemeal
3161 fashion. OTOH we are not interested in the details of
3162 this for the REG_EQUAL note, we want to know the big picture,
3163 which can be succinctly described with a simple SUBREG.
3164 Note that removing the REG_EQUAL note is not an option
3165 on the last insn of a libcall, so we must do a replacement. */
3166 if (! purge_addressof_replacements
3167 && ! purge_bitfield_addressof_replacements)
3169 /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
3170 we got
3171 (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
3172 [0 S8 A32]), which can be expressed with a simple
3173 same-size subreg */
3174 if ((GET_MODE_SIZE (GET_MODE (x))
3175 == GET_MODE_SIZE (GET_MODE (sub)))
3176 /* Again, invalid pointer casts (as in
3177 compile/990203-1.c) can require paradoxical
3178 subregs. */
3179 || (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3180 && (GET_MODE_SIZE (GET_MODE (x))
3181 > GET_MODE_SIZE (GET_MODE (sub)))))
3183 *loc = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
3184 return true;
3186 /* ??? Are there other cases we should handle? */
3188 /* Sometimes we may not be able to find the replacement. For
3189 example when the original insn was a MEM in a wider mode,
3190 and the note is part of a sign extension of a narrowed
3191 version of that MEM. Gcc testcase compile/990829-1.c can
3192 generate an example of this situation. Rather than complain
3193 we return false, which will prompt our caller to remove the
3194 offending note. */
3195 return false;
3198 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3199 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3201 /* Do not frob unchanging MEMs. If a later reference forces the
3202 pseudo to the stack, we can wind up with multiple writes to
3203 an unchanging memory, which is invalid. */
3204 if (RTX_UNCHANGING_P (x) && size_x != size_sub)
3207 /* Don't even consider working with paradoxical subregs,
3208 or the moral equivalent seen here. */
3209 else if (size_x <= size_sub
3210 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3212 /* Do a bitfield insertion to mirror what would happen
3213 in memory. */
3215 rtx val, seq;
3217 if (store)
3219 rtx p = PREV_INSN (insn);
3221 start_sequence ();
3222 val = gen_reg_rtx (GET_MODE (x));
3223 if (! validate_change (insn, loc, val, 0))
3225 /* Discard the current sequence and put the
3226 ADDRESSOF on stack. */
3227 end_sequence ();
3228 goto give_up;
3230 seq = get_insns ();
3231 end_sequence ();
3232 emit_insn_before (seq, insn);
3233 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3234 insn, ht);
3236 start_sequence ();
3237 store_bit_field (sub, size_x, 0, GET_MODE (x),
3238 val, GET_MODE_SIZE (GET_MODE (sub)));
3240 /* Make sure to unshare any shared rtl that store_bit_field
3241 might have created. */
3242 unshare_all_rtl_again (get_insns ());
3244 seq = get_insns ();
3245 end_sequence ();
3246 p = emit_insn_after (seq, insn);
3247 if (NEXT_INSN (insn))
3248 compute_insns_for_mem (NEXT_INSN (insn),
3249 p ? NEXT_INSN (p) : NULL_RTX,
3250 ht);
3252 else
3254 rtx p = PREV_INSN (insn);
3256 start_sequence ();
3257 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3258 GET_MODE (x), GET_MODE (x),
3259 GET_MODE_SIZE (GET_MODE (sub)));
3261 if (! validate_change (insn, loc, val, 0))
3263 /* Discard the current sequence and put the
3264 ADDRESSOF on stack. */
3265 end_sequence ();
3266 goto give_up;
3269 seq = get_insns ();
3270 end_sequence ();
3271 emit_insn_before (seq, insn);
3272 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3273 insn, ht);
3276 /* Remember the replacement so that the same one can be done
3277 on the REG_NOTES. */
3278 purge_bitfield_addressof_replacements
3279 = gen_rtx_EXPR_LIST (VOIDmode, x,
3280 gen_rtx_EXPR_LIST
3281 (VOIDmode, val,
3282 purge_bitfield_addressof_replacements));
3284 /* We replaced with a reg -- all done. */
3285 return true;
3289 else if (validate_change (insn, loc, sub, 0))
3291 /* Remember the replacement so that the same one can be done
3292 on the REG_NOTES. */
3293 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3295 rtx tem;
3297 for (tem = purge_addressof_replacements;
3298 tem != NULL_RTX;
3299 tem = XEXP (XEXP (tem, 1), 1))
3300 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3302 XEXP (XEXP (tem, 1), 0) = sub;
3303 return true;
3305 purge_addressof_replacements
3306 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3307 gen_rtx_EXPR_LIST (VOIDmode, sub,
3308 purge_addressof_replacements));
3309 return true;
3311 goto restart;
3315 give_up:
3316 /* Scan all subexpressions. */
3317 fmt = GET_RTX_FORMAT (code);
3318 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3320 if (*fmt == 'e')
3321 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0,
3322 may_postpone, ht);
3323 else if (*fmt == 'E')
3324 for (j = 0; j < XVECLEN (x, i); j++)
3325 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0,
3326 may_postpone, ht);
3329 return result;
3332 /* Return a hash value for K, a REG. */
3334 static hashval_t
3335 insns_for_mem_hash (k)
3336 const void * k;
3338 /* Use the address of the key for the hash value. */
3339 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3340 return htab_hash_pointer (m->key);
3343 /* Return nonzero if K1 and K2 (two REGs) are the same. */
3345 static int
3346 insns_for_mem_comp (k1, k2)
3347 const void * k1;
3348 const void * k2;
3350 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3351 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3352 return m1->key == m2->key;
3355 struct insns_for_mem_walk_info
3357 /* The hash table that we are using to record which INSNs use which
3358 MEMs. */
3359 htab_t ht;
3361 /* The INSN we are currently processing. */
3362 rtx insn;
3364 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3365 to find the insns that use the REGs in the ADDRESSOFs. */
3366 int pass;
3369 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3370 that might be used in an ADDRESSOF expression, record this INSN in
3371 the hash table given by DATA (which is really a pointer to an
3372 insns_for_mem_walk_info structure). */
3374 static int
3375 insns_for_mem_walk (r, data)
3376 rtx *r;
3377 void *data;
3379 struct insns_for_mem_walk_info *ifmwi
3380 = (struct insns_for_mem_walk_info *) data;
3381 struct insns_for_mem_entry tmp;
3382 tmp.insns = NULL_RTX;
3384 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3385 && GET_CODE (XEXP (*r, 0)) == REG)
3387 void **e;
3388 tmp.key = XEXP (*r, 0);
3389 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3390 if (*e == NULL)
3392 *e = ggc_alloc (sizeof (tmp));
3393 memcpy (*e, &tmp, sizeof (tmp));
3396 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3398 struct insns_for_mem_entry *ifme;
3399 tmp.key = *r;
3400 ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3402 /* If we have not already recorded this INSN, do so now. Since
3403 we process the INSNs in order, we know that if we have
3404 recorded it, it must be at the front of the list. */
3405 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3406 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3407 ifme->insns);
3410 return 0;
3413 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3414 which REGs in HT. */
3416 static void
3417 compute_insns_for_mem (insns, last_insn, ht)
3418 rtx insns;
3419 rtx last_insn;
3420 htab_t ht;
3422 rtx insn;
3423 struct insns_for_mem_walk_info ifmwi;
3424 ifmwi.ht = ht;
3426 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3427 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3428 if (INSN_P (insn))
3430 ifmwi.insn = insn;
3431 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3435 /* Helper function for purge_addressof called through for_each_rtx.
3436 Returns true iff the rtl is an ADDRESSOF. */
3438 static int
3439 is_addressof (rtl, data)
3440 rtx *rtl;
3441 void *data ATTRIBUTE_UNUSED;
3443 return GET_CODE (*rtl) == ADDRESSOF;
3446 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3447 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3448 stack. */
3450 void
3451 purge_addressof (insns)
3452 rtx insns;
3454 rtx insn, tmp;
3455 htab_t ht;
3457 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3458 requires a fixup pass over the instruction stream to correct
3459 INSNs that depended on the REG being a REG, and not a MEM. But,
3460 these fixup passes are slow. Furthermore, most MEMs are not
3461 mentioned in very many instructions. So, we speed up the process
3462 by pre-calculating which REGs occur in which INSNs; that allows
3463 us to perform the fixup passes much more quickly. */
3464 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3465 compute_insns_for_mem (insns, NULL_RTX, ht);
3467 postponed_insns = NULL;
3469 for (insn = insns; insn; insn = NEXT_INSN (insn))
3470 if (INSN_P (insn))
3472 if (! purge_addressof_1 (&PATTERN (insn), insn,
3473 asm_noperands (PATTERN (insn)) > 0, 0, 1, ht))
3474 /* If we could not replace the ADDRESSOFs in the insn,
3475 something is wrong. */
3476 abort ();
3478 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, 0, ht))
3480 /* If we could not replace the ADDRESSOFs in the insn's notes,
3481 we can just remove the offending notes instead. */
3482 rtx note;
3484 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3486 /* If we find a REG_RETVAL note then the insn is a libcall.
3487 Such insns must have REG_EQUAL notes as well, in order
3488 for later passes of the compiler to work. So it is not
3489 safe to delete the notes here, and instead we abort. */
3490 if (REG_NOTE_KIND (note) == REG_RETVAL)
3491 abort ();
3492 if (for_each_rtx (&note, is_addressof, NULL))
3493 remove_note (insn, note);
3498 /* Process the postponed insns. */
3499 while (postponed_insns)
3501 insn = XEXP (postponed_insns, 0);
3502 tmp = postponed_insns;
3503 postponed_insns = XEXP (postponed_insns, 1);
3504 free_INSN_LIST_node (tmp);
3506 if (! purge_addressof_1 (&PATTERN (insn), insn,
3507 asm_noperands (PATTERN (insn)) > 0, 0, 0, ht))
3508 abort ();
3511 /* Clean up. */
3512 purge_bitfield_addressof_replacements = 0;
3513 purge_addressof_replacements = 0;
3515 /* REGs are shared. purge_addressof will destructively replace a REG
3516 with a MEM, which creates shared MEMs.
3518 Unfortunately, the children of put_reg_into_stack assume that MEMs
3519 referring to the same stack slot are shared (fixup_var_refs and
3520 the associated hash table code).
3522 So, we have to do another unsharing pass after we have flushed any
3523 REGs that had their address taken into the stack.
3525 It may be worth tracking whether or not we converted any REGs into
3526 MEMs to avoid this overhead when it is not needed. */
3527 unshare_all_rtl_again (get_insns ());
3530 /* Convert a SET of a hard subreg to a set of the appropriate hard
3531 register. A subroutine of purge_hard_subreg_sets. */
3533 static void
3534 purge_single_hard_subreg_set (pattern)
3535 rtx pattern;
3537 rtx reg = SET_DEST (pattern);
3538 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3539 int offset = 0;
3541 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3542 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3544 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3545 GET_MODE (SUBREG_REG (reg)),
3546 SUBREG_BYTE (reg),
3547 GET_MODE (reg));
3548 reg = SUBREG_REG (reg);
3552 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3554 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3555 SET_DEST (pattern) = reg;
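/* Example (assuming a little-endian target with 32-bit words):
   (set (subreg:SI (reg:DI 0) 4) ...) becomes (set (reg:SI 1) ...),
   since subreg_regno_offset maps byte 4 of the DImode pair to the
   second hard register.  */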
3559 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3560 only such SETs that we expect to see are those left in because
3561 integrate can't handle sets of parts of a return value register.
3563 We don't use alter_subreg because we only want to eliminate subregs
3564 of hard registers. */
3566 void
3567 purge_hard_subreg_sets (insn)
3568 rtx insn;
3570 for (; insn; insn = NEXT_INSN (insn))
3572 if (INSN_P (insn))
3574 rtx pattern = PATTERN (insn);
3575 switch (GET_CODE (pattern))
3577 case SET:
3578 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3579 purge_single_hard_subreg_set (pattern);
3580 break;
3581 case PARALLEL:
3583 int j;
3584 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3586 rtx inner_pattern = XVECEXP (pattern, 0, j);
3587 if (GET_CODE (inner_pattern) == SET
3588 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3589 purge_single_hard_subreg_set (inner_pattern);
3592 break;
3593 default:
3594 break;
3600 /* Pass through the INSNS of function FNDECL and convert virtual register
3601 references to hard register references. */
3603 void
3604 instantiate_virtual_regs (fndecl, insns)
3605 tree fndecl;
3606 rtx insns;
3608 rtx insn;
3609 unsigned int i;
3611 /* Compute the offsets to use for this function. */
3612 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3613 var_offset = STARTING_FRAME_OFFSET;
3614 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3615 out_arg_offset = STACK_POINTER_OFFSET;
3616 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3618 /* Scan all variables and parameters of this function. For each that is
3619 in memory, instantiate all virtual registers if the result is a valid
3620 address. If not, we do it later. That will handle most uses of virtual
3621 regs on many machines. */
3622 instantiate_decls (fndecl, 1);
3624 /* Initialize recognition, indicating that volatile is OK. */
3625 init_recog ();
3627 /* Scan through all the insns, instantiating every virtual register still
3628 present. */
3629 for (insn = insns; insn; insn = NEXT_INSN (insn))
3630 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3631 || GET_CODE (insn) == CALL_INSN)
3633 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3634 if (INSN_DELETED_P (insn))
3635 continue;
3636 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3637 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3638 if (GET_CODE (insn) == CALL_INSN)
3639 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3640 NULL_RTX, 0);
3642 /* Past this point all ASM statements should match. Verify that
3643 to avoid failures later in the compilation process. */
3644 if (asm_noperands (PATTERN (insn)) >= 0
3645 && ! check_asm_operands (PATTERN (insn)))
3646 instantiate_virtual_regs_lossage (insn);
3649 /* Instantiate the stack slots for the parm registers, for later use in
3650 addressof elimination. */
3651 for (i = 0; i < max_parm_reg; ++i)
3652 if (parm_reg_stack_loc[i])
3653 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3655 /* Now instantiate the remaining register equivalences for debugging info.
3656 These will not be valid addresses. */
3657 instantiate_decls (fndecl, 0);
3659 /* Indicate that, from now on, assign_stack_local should use
3660 frame_pointer_rtx. */
3661 virtuals_instantiated = 1;
3664 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3665 all virtual registers in their DECL_RTL's.
3667 If VALID_ONLY, do this only if the resulting address is still valid.
3668 Otherwise, always do it. */
3670 static void
3671 instantiate_decls (fndecl, valid_only)
3672 tree fndecl;
3673 int valid_only;
3675 tree decl;
3677 /* Process all parameters of the function. */
3678 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3680 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3681 HOST_WIDE_INT size_rtl;
3683 instantiate_decl (DECL_RTL (decl), size, valid_only);
3685 /* If the parameter was promoted, then the incoming RTL mode may be
3686 larger than the declared type size. We must use the larger of
3687 the two sizes. */
3688 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3689 size = MAX (size_rtl, size);
3690 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3693 /* Now process all variables defined in the function or its subblocks. */
3694 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3697 /* Subroutine of instantiate_decls: Process all decls in the given
3698 BLOCK node and all its subblocks. */
3700 static void
3701 instantiate_decls_1 (let, valid_only)
3702 tree let;
3703 int valid_only;
3705 tree t;
3707 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3708 if (DECL_RTL_SET_P (t))
3709 instantiate_decl (DECL_RTL (t),
3710 int_size_in_bytes (TREE_TYPE (t)),
3711 valid_only);
3713 /* Process all subblocks. */
3714 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3715 instantiate_decls_1 (t, valid_only);
3718 /* Subroutine of the preceding procedures: Given RTL representing a
3719 decl and the size of the object, do any instantiation required.
3721 If VALID_ONLY is nonzero, it means that the RTL should only be
3722 changed if the new address is valid. */
3724 static void
3725 instantiate_decl (x, size, valid_only)
3726 rtx x;
3727 HOST_WIDE_INT size;
3728 int valid_only;
3730 enum machine_mode mode;
3731 rtx addr;
3733 /* If this is not a MEM, no need to do anything. Similarly if the
3734 address is a constant or a register that is not a virtual register. */
3736 if (x == 0 || GET_CODE (x) != MEM)
3737 return;
3739 addr = XEXP (x, 0);
3740 if (CONSTANT_P (addr)
3741 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3742 || (GET_CODE (addr) == REG
3743 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3744 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3745 return;
3747    /* If we are to do this only when the address is valid, copy the address.
3748 We need to do this so we can undo any changes that might make the
3749 address invalid. This copy is unfortunate, but probably can't be
3750 avoided. */
3752 if (valid_only)
3753 addr = copy_rtx (addr);
3755 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3757 if (valid_only && size >= 0)
3759 unsigned HOST_WIDE_INT decl_size = size;
3761 /* Now verify that the resulting address is valid for every integer or
3762 floating-point mode up to and including SIZE bytes long. We do this
3763 since the object might be accessed in any mode and frame addresses
3764 are shared. */
3766 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3767 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3768 mode = GET_MODE_WIDER_MODE (mode))
3769 if (! memory_address_p (mode, addr))
3770 return;
3772 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3773 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3774 mode = GET_MODE_WIDER_MODE (mode))
3775 if (! memory_address_p (mode, addr))
3776 return;
3779 /* Put back the address now that we have updated it and we either know
3780 it is valid or we don't care whether it is valid. */
3782 XEXP (x, 0) = addr;
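/* On a typical 32-bit target, a decl of SIZE 8 would be checked above
   in QImode, HImode, SImode and DImode, then in SFmode and DFmode,
   before the new address is kept; the precise set of modes is
   target-dependent, so this is only illustrative.  */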
3785 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3786 is a virtual register, return the equivalent hard register and set the
3787 offset indirectly through the pointer. Otherwise, return 0. */
3789 static rtx
3790 instantiate_new_reg (x, poffset)
3791 rtx x;
3792 HOST_WIDE_INT *poffset;
3794 rtx new;
3795 HOST_WIDE_INT offset;
3797 if (x == virtual_incoming_args_rtx)
3798 new = arg_pointer_rtx, offset = in_arg_offset;
3799 else if (x == virtual_stack_vars_rtx)
3800 new = frame_pointer_rtx, offset = var_offset;
3801 else if (x == virtual_stack_dynamic_rtx)
3802 new = stack_pointer_rtx, offset = dynamic_offset;
3803 else if (x == virtual_outgoing_args_rtx)
3804 new = stack_pointer_rtx, offset = out_arg_offset;
3805 else if (x == virtual_cfa_rtx)
3806 new = arg_pointer_rtx, offset = cfa_offset;
3807 else
3808 return 0;
3810 *poffset = offset;
3811 return new;
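/* For instance, assuming FIRST_PARM_OFFSET (fndecl) is 8 on some
   hypothetical target, virtual_incoming_args_rtx maps here to
   arg_pointer_rtx with *POFFSET set to 8; the callers below then fold
   the offset in, so a use of (reg virtual-incoming-args) becomes
   (plus (reg ap) (const_int 8)).  */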
3815 /* Called when instantiate_virtual_regs has failed to update the instruction.
3816    Usually this means that a non-matching instruction has been emitted; for
3817    asm statements, however, the problem may be in the constraints.  */
3818 static void
3819 instantiate_virtual_regs_lossage (insn)
3820 rtx insn;
3822 if (asm_noperands (PATTERN (insn)) >= 0)
3824 error_for_asm (insn, "impossible constraint in `asm'");
3825 delete_insn (insn);
3827 else
3828 abort ();
3830 /* Given a pointer to a piece of rtx and an optional pointer to the
3831 containing object, instantiate any virtual registers present in it.
3833 If EXTRA_INSNS, we always do the replacement and generate
3834    any extra insns before OBJECT.  If it is zero, we do nothing if the
3835    replacement is not valid.
3837 Return 1 if we either had nothing to do or if we were able to do the
3838 needed replacement. Return 0 otherwise; we only return zero if
3839 EXTRA_INSNS is zero.
3841 We first try some simple transformations to avoid the creation of extra
3842 pseudos. */
3844 static int
3845 instantiate_virtual_regs_1 (loc, object, extra_insns)
3846 rtx *loc;
3847 rtx object;
3848 int extra_insns;
3850 rtx x;
3851 RTX_CODE code;
3852 rtx new = 0;
3853 HOST_WIDE_INT offset = 0;
3854 rtx temp;
3855 rtx seq;
3856 int i, j;
3857 const char *fmt;
3859 /* Re-start here to avoid recursion in common cases. */
3860 restart:
3862 x = *loc;
3863 if (x == 0)
3864 return 1;
3866 /* We may have detected and deleted invalid asm statements. */
3867 if (object && INSN_P (object) && INSN_DELETED_P (object))
3868 return 1;
3870 code = GET_CODE (x);
3872 /* Check for some special cases. */
3873 switch (code)
3875 case CONST_INT:
3876 case CONST_DOUBLE:
3877 case CONST_VECTOR:
3878 case CONST:
3879 case SYMBOL_REF:
3880 case CODE_LABEL:
3881 case PC:
3882 case CC0:
3883 case ASM_INPUT:
3884 case ADDR_VEC:
3885 case ADDR_DIFF_VEC:
3886 case RETURN:
3887 return 1;
3889 case SET:
3890 /* We are allowed to set the virtual registers. This means that
3891 the actual register should receive the source minus the
3892 appropriate offset. This is used, for example, in the handling
3893 of non-local gotos. */
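/* For example (hypothetical offset), with var_offset == -16 a
   nonlocal-goto restore such as
     (set (reg virtual-stack-vars) (reg 100))
   is rewritten below so that the frame pointer receives the value
   minus the offset:
     (set (reg fp) (plus (reg 100) (const_int 16)))
   and later references through fp + var_offset then see the saved
   value.  */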
3894 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3896 rtx src = SET_SRC (x);
3898 /* We are setting the register, not using it, so the relevant
3899 offset is the negative of the offset to use were we using
3900 the register. */
3901 offset = - offset;
3902 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3904 /* The only valid sources here are PLUS or REG. Just do
3905 the simplest possible thing to handle them. */
3906 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3908 instantiate_virtual_regs_lossage (object);
3909 return 1;
3912 start_sequence ();
3913 if (GET_CODE (src) != REG)
3914 temp = force_operand (src, NULL_RTX);
3915 else
3916 temp = src;
3917 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3918 seq = get_insns ();
3919 end_sequence ();
3921 emit_insn_before (seq, object);
3922 SET_DEST (x) = new;
3924 if (! validate_change (object, &SET_SRC (x), temp, 0)
3925 || ! extra_insns)
3926 instantiate_virtual_regs_lossage (object);
3928 return 1;
3931 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3932 loc = &SET_SRC (x);
3933 goto restart;
3935 case PLUS:
3936 /* Handle special case of virtual register plus constant. */
3937 if (CONSTANT_P (XEXP (x, 1)))
3939 rtx old, new_offset;
3941 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3942 if (GET_CODE (XEXP (x, 0)) == PLUS)
3944 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3946 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3947 extra_insns);
3948 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3950 else
3952 loc = &XEXP (x, 0);
3953 goto restart;
3957 #ifdef POINTERS_EXTEND_UNSIGNED
3958 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3959 we can commute the PLUS and SUBREG because pointers into the
3960 frame are well-behaved. */
3961 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3962 && GET_CODE (XEXP (x, 1)) == CONST_INT
3963 && 0 != (new
3964 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3965 &offset))
3966 && validate_change (object, loc,
3967 plus_constant (gen_lowpart (ptr_mode,
3968 new),
3969 offset
3970 + INTVAL (XEXP (x, 1))),
3972 return 1;
3973 #endif
3974 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3976 /* We know the second operand is a constant. Unless the
3977    first operand is a REG (which has already been checked),
3978 it needs to be checked. */
3979 if (GET_CODE (XEXP (x, 0)) != REG)
3981 loc = &XEXP (x, 0);
3982 goto restart;
3984 return 1;
3987 new_offset = plus_constant (XEXP (x, 1), offset);
3989 /* If the new constant is zero, try to replace the sum with just
3990 the register. */
3991 if (new_offset == const0_rtx
3992 && validate_change (object, loc, new, 0))
3993 return 1;
3995 /* Next try to replace the register and new offset.
3996 There are two changes to validate here and we can't assume that
3997 in the case of old offset equals new just changing the register
3998 will yield a valid insn. In the interests of a little efficiency,
3999 however, we only call validate change once (we don't queue up the
4000 changes and then call apply_change_group). */
4002 old = XEXP (x, 0);
4003 if (offset == 0
4004 ? ! validate_change (object, &XEXP (x, 0), new, 0)
4005 : (XEXP (x, 0) = new,
4006 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
4008 if (! extra_insns)
4010 XEXP (x, 0) = old;
4011 return 0;
4014 /* Otherwise copy the new constant into a register and replace
4015 constant with that register. */
4016 temp = gen_reg_rtx (Pmode);
4017 XEXP (x, 0) = new;
4018 if (validate_change (object, &XEXP (x, 1), temp, 0))
4019 emit_insn_before (gen_move_insn (temp, new_offset), object);
4020 else
4022 /* If that didn't work, replace this expression with a
4023 register containing the sum. */
4025 XEXP (x, 0) = old;
4026 new = gen_rtx_PLUS (Pmode, new, new_offset);
4028 start_sequence ();
4029 temp = force_operand (new, NULL_RTX);
4030 seq = get_insns ();
4031 end_sequence ();
4033 emit_insn_before (seq, object);
4034 if (! validate_change (object, loc, temp, 0)
4035 && ! validate_replace_rtx (x, temp, object))
4037 instantiate_virtual_regs_lossage (object);
4038 return 1;
4043 return 1;
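/* To make the PLUS case above concrete (hypothetical offset): with
   in_arg_offset == 8, the address
     (plus (reg virtual-incoming-args) (const_int 4))
   becomes (plus (reg ap) (const_int 12)); had the combined constant
   been zero, the sum would have been replaced by (reg ap) alone.  */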
4046 /* Fall through to generic two-operand expression case. */
4047 case EXPR_LIST:
4048 case CALL:
4049 case COMPARE:
4050 case MINUS:
4051 case MULT:
4052 case DIV: case UDIV:
4053 case MOD: case UMOD:
4054 case AND: case IOR: case XOR:
4055 case ROTATERT: case ROTATE:
4056 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
4057 case NE: case EQ:
4058 case GE: case GT: case GEU: case GTU:
4059 case LE: case LT: case LEU: case LTU:
4060 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
4061 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
4062 loc = &XEXP (x, 0);
4063 goto restart;
4065 case MEM:
4066 /* Most cases of MEM that convert to valid addresses have already been
4067 handled by our scan of decls. The only special handling we
4068 need here is to make a copy of the rtx to ensure it isn't being
4069 shared if we have to change it to a pseudo.
4071 If the rtx is a simple reference to an address via a virtual register,
4072 it can potentially be shared. In such cases, first try to make it
4073 a valid address, which can also be shared. Otherwise, copy it and
4074 proceed normally.
4076 First check for common cases that need no processing. These are
4077 usually due to instantiation already being done on a previous instance
4078 of a shared rtx. */
4080 temp = XEXP (x, 0);
4081 if (CONSTANT_ADDRESS_P (temp)
4082 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4083 || temp == arg_pointer_rtx
4084 #endif
4085 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4086 || temp == hard_frame_pointer_rtx
4087 #endif
4088 || temp == frame_pointer_rtx)
4089 return 1;
4091 if (GET_CODE (temp) == PLUS
4092 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4093 && (XEXP (temp, 0) == frame_pointer_rtx
4094 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4095 || XEXP (temp, 0) == hard_frame_pointer_rtx
4096 #endif
4097 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4098 || XEXP (temp, 0) == arg_pointer_rtx
4099 #endif
4101 return 1;
4103 if (temp == virtual_stack_vars_rtx
4104 || temp == virtual_incoming_args_rtx
4105 || (GET_CODE (temp) == PLUS
4106 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4107 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4108 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4110 /* This MEM may be shared. If the substitution can be done without
4111 the need to generate new pseudos, we want to do it in place
4112 so all copies of the shared rtx benefit. The call below will
4113 only make substitutions if the resulting address is still
4114 valid.
4116 Note that we cannot pass X as the object in the recursive call
4117 since the insn being processed may not allow all valid
4118    addresses.  However, if we were not passed an object, we can
4119 only modify X without copying it if X will have a valid
4120 address.
4122 ??? Also note that this can still lose if OBJECT is an insn that
4123    has fewer restrictions on an address than some other insn.
4124 In that case, we will modify the shared address. This case
4125 doesn't seem very likely, though. One case where this could
4126 happen is in the case of a USE or CLOBBER reference, but we
4127 take care of that below. */
4129 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4130 object ? object : x, 0))
4131 return 1;
4133 /* Otherwise make a copy and process that copy. We copy the entire
4134 RTL expression since it might be a PLUS which could also be
4135 shared. */
4136 *loc = x = copy_rtx (x);
4139 /* Fall through to generic unary operation case. */
4140 case PREFETCH:
4141 case SUBREG:
4142 case STRICT_LOW_PART:
4143 case NEG: case NOT:
4144 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4145 case SIGN_EXTEND: case ZERO_EXTEND:
4146 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4147 case FLOAT: case FIX:
4148 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4149 case ABS:
4150 case SQRT:
4151 case FFS:
4152 case CLZ: case CTZ:
4153 case POPCOUNT: case PARITY:
4154      /* These cases either have just one operand or we know that we need not
4155 check the rest of the operands. */
4156 loc = &XEXP (x, 0);
4157 goto restart;
4159 case USE:
4160 case CLOBBER:
4161 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4162 go ahead and make the invalid one, but do it to a copy. For a REG,
4163 just make the recursive call, since there's no chance of a problem. */
4165 if ((GET_CODE (XEXP (x, 0)) == MEM
4166 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4168 || (GET_CODE (XEXP (x, 0)) == REG
4169 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4170 return 1;
4172 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4173 loc = &XEXP (x, 0);
4174 goto restart;
4176 case REG:
4177 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4178 in front of this insn and substitute the temporary. */
4179 if ((new = instantiate_new_reg (x, &offset)) != 0)
4181 temp = plus_constant (new, offset);
4182 if (!validate_change (object, loc, temp, 0))
4184 if (! extra_insns)
4185 return 0;
4187 start_sequence ();
4188 temp = force_operand (temp, NULL_RTX);
4189 seq = get_insns ();
4190 end_sequence ();
4192 emit_insn_before (seq, object);
4193 if (! validate_change (object, loc, temp, 0)
4194 && ! validate_replace_rtx (x, temp, object))
4195 instantiate_virtual_regs_lossage (object);
4199 return 1;
4201 case ADDRESSOF:
4202 if (GET_CODE (XEXP (x, 0)) == REG)
4203 return 1;
4205 else if (GET_CODE (XEXP (x, 0)) == MEM)
4207 /* If we have a (addressof (mem ..)), do any instantiation inside
4208 since we know we'll be making the inside valid when we finally
4209 remove the ADDRESSOF. */
4210 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4211 return 1;
4213 break;
4215 default:
4216 break;
4219 /* Scan all subexpressions. */
4220 fmt = GET_RTX_FORMAT (code);
4221 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4222 if (*fmt == 'e')
4224 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4225 return 0;
4227 else if (*fmt == 'E')
4228 for (j = 0; j < XVECLEN (x, i); j++)
4229 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4230 extra_insns))
4231 return 0;
4233 return 1;
4236 /* Optimization: assuming this function does not receive nonlocal gotos,
4237 delete the handlers for such, as well as the insns to establish
4238 and disestablish them. */
4240 static void
4241 delete_handlers ()
4243 rtx insn;
4244 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4246 /* Delete the handler by turning off the flag that would
4247 prevent jump_optimize from deleting it.
4248 Also permit deletion of the nonlocal labels themselves
4249 if nothing local refers to them. */
4250 if (GET_CODE (insn) == CODE_LABEL)
4252 tree t, last_t;
4254 LABEL_PRESERVE_P (insn) = 0;
4256 /* Remove it from the nonlocal_label list, to avoid confusing
4257 flow. */
4258 for (t = nonlocal_labels, last_t = 0; t;
4259 last_t = t, t = TREE_CHAIN (t))
4260 if (DECL_RTL (TREE_VALUE (t)) == insn)
4261 break;
4262 if (t)
4264 if (! last_t)
4265 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4266 else
4267 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4270 if (GET_CODE (insn) == INSN)
4272 int can_delete = 0;
4273 rtx t;
4274 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4275 if (reg_mentioned_p (t, PATTERN (insn)))
4277 can_delete = 1;
4278 break;
4280 if (can_delete
4281 || (nonlocal_goto_stack_level != 0
4282 && reg_mentioned_p (nonlocal_goto_stack_level,
4283 PATTERN (insn))))
4284 delete_related_insns (insn);
4289 /* Return the first insn following those generated by `assign_parms'. */
4292 get_first_nonparm_insn ()
4294 if (last_parm_insn)
4295 return NEXT_INSN (last_parm_insn);
4296 return get_insns ();
4299 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4300 This means a type for which function calls must pass an address to the
4301 function or get an address back from the function.
4302 EXP may be a type node or an expression (whose type is tested). */
4305 aggregate_value_p (exp)
4306 tree exp;
4308 int i, regno, nregs;
4309 rtx reg;
4311 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4313 if (TREE_CODE (type) == VOID_TYPE)
4314 return 0;
4315 if (RETURN_IN_MEMORY (type))
4316 return 1;
4317 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4318 and thus can't be returned in registers. */
4319 if (TREE_ADDRESSABLE (type))
4320 return 1;
4321 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4322 return 1;
4323 /* Make sure we have suitable call-clobbered regs to return
4324 the value in; if not, we must return it in memory. */
4325 reg = hard_function_value (type, 0, 0);
4327 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4328 it is OK. */
4329 if (GET_CODE (reg) != REG)
4330 return 0;
4332 regno = REGNO (reg);
4333 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4334 for (i = 0; i < nregs; i++)
4335 if (! call_used_regs[regno + i])
4336 return 1;
4337 return 0;
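/* For example, `struct S { int i; };' is not TREE_ADDRESSABLE and on
   most targets comes back in a call-clobbered register, so we return
   0 for it; with -fpcc-struct-return (flag_pcc_struct_return set),
   every aggregate, including S, is forced into memory and we return
   1.  */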
4340 /* Assign RTL expressions to the function's parameters.
4341 This may involve copying them into registers and using
4342 those registers as the RTL for them. */
4344 void
4345 assign_parms (fndecl)
4346 tree fndecl;
4348 tree parm;
4349 CUMULATIVE_ARGS args_so_far;
4350 /* Total space needed so far for args on the stack,
4351 given as a constant and a tree-expression. */
4352 struct args_size stack_args_size;
4353 tree fntype = TREE_TYPE (fndecl);
4354 tree fnargs = DECL_ARGUMENTS (fndecl), orig_fnargs;
4355 /* This is used for the arg pointer when referring to stack args. */
4356 rtx internal_arg_pointer;
4357 /* This is a dummy PARM_DECL that we used for the function result if
4358 the function returns a structure. */
4359 tree function_result_decl = 0;
4360 #ifdef SETUP_INCOMING_VARARGS
4361 int varargs_setup = 0;
4362 #endif
4363 int reg_parm_stack_space = 0;
4364 rtx conversion_insns = 0;
4366 /* Nonzero if function takes extra anonymous args.
4367 This means the last named arg must be on the stack
4368 right before the anonymous ones. */
4369 int stdarg
4370 = (TYPE_ARG_TYPES (fntype) != 0
4371 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4372 != void_type_node));
4374 current_function_stdarg = stdarg;
4376 /* If the reg that the virtual arg pointer will be translated into is
4377 not a fixed reg or is the stack pointer, make a copy of the virtual
4378 arg pointer, and address parms via the copy. The frame pointer is
4379 considered fixed even though it is not marked as such.
4381 The second time through, simply use ap to avoid generating rtx. */
4383 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4384 || ! (fixed_regs[ARG_POINTER_REGNUM]
4385 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4386 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4387 else
4388 internal_arg_pointer = virtual_incoming_args_rtx;
4389 current_function_internal_arg_pointer = internal_arg_pointer;
4391 stack_args_size.constant = 0;
4392 stack_args_size.var = 0;
4394 /* If struct value address is treated as the first argument, make it so. */
4395 if (aggregate_value_p (DECL_RESULT (fndecl))
4396 && ! current_function_returns_pcc_struct
4397 && struct_value_incoming_rtx == 0)
4399 tree type = build_pointer_type (TREE_TYPE (fntype));
4401 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4403 DECL_ARG_TYPE (function_result_decl) = type;
4404 TREE_CHAIN (function_result_decl) = fnargs;
4405 fnargs = function_result_decl;
4408 orig_fnargs = fnargs;
4410 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4411 parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4413 if (SPLIT_COMPLEX_ARGS)
4414 fnargs = split_complex_args (fnargs);
4416 #ifdef REG_PARM_STACK_SPACE
4417 #ifdef MAYBE_REG_PARM_STACK_SPACE
4418 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4419 #else
4420 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4421 #endif
4422 #endif
4424 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4425 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4426 #else
4427 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl);
4428 #endif
4430 /* We haven't yet found an argument that we must push and pretend the
4431 caller did. */
4432 current_function_pretend_args_size = 0;
4434 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4436 rtx entry_parm;
4437 rtx stack_parm;
4438 enum machine_mode promoted_mode, passed_mode;
4439 enum machine_mode nominal_mode, promoted_nominal_mode;
4440 int unsignedp;
4441 struct locate_and_pad_arg_data locate;
4442 int passed_pointer = 0;
4443 int did_conversion = 0;
4444 tree passed_type = DECL_ARG_TYPE (parm);
4445 tree nominal_type = TREE_TYPE (parm);
4446 int last_named = 0, named_arg;
4447 int in_regs;
4448 int partial = 0;
4450 /* Set LAST_NAMED if this is last named arg before last
4451 anonymous args. */
4452 if (stdarg)
4454 tree tem;
4456 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4457 if (DECL_NAME (tem))
4458 break;
4460 if (tem == 0)
4461 last_named = 1;
4463 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4464 most machines, if this is a varargs/stdarg function, then we treat
4465 the last named arg as if it were anonymous too. */
4466 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4468 if (TREE_TYPE (parm) == error_mark_node
4469 /* This can happen after weird syntax errors
4470 or if an enum type is defined among the parms. */
4471 || TREE_CODE (parm) != PARM_DECL
4472 || passed_type == NULL)
4474 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4475 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4476 TREE_USED (parm) = 1;
4477 continue;
4480 /* Find mode of arg as it is passed, and mode of arg
4481 as it should be during execution of this function. */
4482 passed_mode = TYPE_MODE (passed_type);
4483 nominal_mode = TYPE_MODE (nominal_type);
4485 /* If the parm's mode is VOID, its value doesn't matter,
4486         so avoid the usual things like emit_move_insn that could crash.  */
4487 if (nominal_mode == VOIDmode)
4489 SET_DECL_RTL (parm, const0_rtx);
4490 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4491 continue;
4494 /* If the parm is to be passed as a transparent union, use the
4495 type of the first field for the tests below. We have already
4496 verified that the modes are the same. */
4497 if (DECL_TRANSPARENT_UNION (parm)
4498 || (TREE_CODE (passed_type) == UNION_TYPE
4499 && TYPE_TRANSPARENT_UNION (passed_type)))
4500 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4502 /* See if this arg was passed by invisible reference. It is if
4503 it is an object whose size depends on the contents of the
4504 object itself or if the machine requires these objects be passed
4505 that way. */
4507 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (passed_type))
4508 || TREE_ADDRESSABLE (passed_type)
4509 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4510 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4511 passed_type, named_arg)
4512 #endif
4515 passed_type = nominal_type = build_pointer_type (passed_type);
4516 passed_pointer = 1;
4517 passed_mode = nominal_mode = Pmode;
4519 /* See if the frontend wants to pass this by invisible reference. */
4520 else if (passed_type != nominal_type
4521 && POINTER_TYPE_P (passed_type)
4522 && TREE_TYPE (passed_type) == nominal_type)
4524 nominal_type = passed_type;
4525 passed_pointer = 1;
4526 passed_mode = nominal_mode = Pmode;
4529 promoted_mode = passed_mode;
4531 #ifdef PROMOTE_FUNCTION_ARGS
4532      /* Compute the mode to which the arg is actually extended.  */
4533 unsignedp = TREE_UNSIGNED (passed_type);
4534 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4535 #endif
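/* E.g., on a hypothetical target defining PROMOTE_FUNCTION_ARGS so
   that sub-word integers are widened, a `signed char' parm would have
   passed_mode == QImode but promoted_mode == SImode, with UNSIGNEDP
   remaining 0; the details are entirely target-specific.  */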
4537 /* Let machine desc say which reg (if any) the parm arrives in.
4538 0 means it arrives on the stack. */
4539 #ifdef FUNCTION_INCOMING_ARG
4540 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4541 passed_type, named_arg);
4542 #else
4543 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4544 passed_type, named_arg);
4545 #endif
4547 if (entry_parm == 0)
4548 promoted_mode = passed_mode;
4550 #ifdef SETUP_INCOMING_VARARGS
4551 /* If this is the last named parameter, do any required setup for
4552 varargs or stdargs. We need to know about the case of this being an
4553 addressable type, in which case we skip the registers it
4554 would have arrived in.
4556 For stdargs, LAST_NAMED will be set for two parameters, the one that
4557 is actually the last named, and the dummy parameter. We only
4558 want to do this action once.
4560 Also, indicate when RTL generation is to be suppressed. */
4561 if (last_named && !varargs_setup)
4563 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4564 current_function_pretend_args_size, 0);
4565 varargs_setup = 1;
4567 #endif
4569 /* Determine parm's home in the stack,
4570 in case it arrives in the stack or we should pretend it did.
4572 Compute the stack position and rtx where the argument arrives
4573 and its size.
4575 There is one complexity here: If this was a parameter that would
4576 have been passed in registers, but wasn't only because it is
4577 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4578 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4579 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4580 0 as it was the previous time. */
4581 in_regs = entry_parm != 0;
4582 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4583 in_regs = 1;
4584 #endif
4585 if (!in_regs && !named_arg)
4587 int pretend_named = PRETEND_OUTGOING_VARARGS_NAMED;
4588 if (pretend_named)
4590 #ifdef FUNCTION_INCOMING_ARG
4591 in_regs = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4592 passed_type,
4593 pretend_named) != 0;
4594 #else
4595 in_regs = FUNCTION_ARG (args_so_far, promoted_mode,
4596 passed_type,
4597 pretend_named) != 0;
4598 #endif
4602 /* If this parameter was passed both in registers and in the stack,
4603 use the copy on the stack. */
4604 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4605 entry_parm = 0;
4607 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4608 if (entry_parm)
4609 partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4610 passed_type, named_arg);
4611 #endif
4613 memset (&locate, 0, sizeof (locate));
4614 locate_and_pad_parm (promoted_mode, passed_type, in_regs,
4615 entry_parm ? partial : 0, fndecl,
4616 &stack_args_size, &locate);
4619 rtx offset_rtx;
4621 /* If we're passing this arg using a reg, make its stack home
4622 the aligned stack slot. */
4623 if (entry_parm)
4624 offset_rtx = ARGS_SIZE_RTX (locate.slot_offset);
4625 else
4626 offset_rtx = ARGS_SIZE_RTX (locate.offset);
4628 if (offset_rtx == const0_rtx)
4629 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4630 else
4631 stack_parm = gen_rtx_MEM (promoted_mode,
4632 gen_rtx_PLUS (Pmode,
4633 internal_arg_pointer,
4634 offset_rtx));
4636 set_mem_attributes (stack_parm, parm, 1);
4638 /* Set also REG_ATTRS if parameter was passed in a register. */
4639 if (entry_parm)
4640 set_reg_attrs_for_parm (entry_parm, stack_parm);
4643 /* If this parm was passed part in regs and part in memory,
4644 pretend it arrived entirely in memory
4645 by pushing the register-part onto the stack.
4647 In the special case of a DImode or DFmode that is split,
4648 we could put it together in a pseudoreg directly,
4649 but for now that's not worth bothering with. */
4651 if (partial)
4653 #ifndef MAYBE_REG_PARM_STACK_SPACE
4654 /* When REG_PARM_STACK_SPACE is nonzero, stack space for
4655 split parameters was allocated by our caller, so we
4656 won't be pushing it in the prolog. */
4657 if (reg_parm_stack_space == 0)
4658 #endif
4659 current_function_pretend_args_size
4660 = (((partial * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4661 / (PARM_BOUNDARY / BITS_PER_UNIT)
4662 * (PARM_BOUNDARY / BITS_PER_UNIT));
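/* Worked example with hypothetical values: PARTIAL == 1,
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 give
   ((1 * 4) + 8 - 1) / 8 * 8 == 8 bytes of pretend args, i.e. the
   register-passed word rounded up to one full parameter slot.  */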
4664 /* Handle calls that pass values in multiple non-contiguous
4665 locations. The Irix 6 ABI has examples of this. */
4666 if (GET_CODE (entry_parm) == PARALLEL)
4667 emit_group_store (validize_mem (stack_parm), entry_parm,
4668 int_size_in_bytes (TREE_TYPE (parm)));
4670 else
4671 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
4672 partial);
4674 entry_parm = stack_parm;
4677 /* If we didn't decide this parm came in a register,
4678 by default it came on the stack. */
4679 if (entry_parm == 0)
4680 entry_parm = stack_parm;
4682 /* Record permanently how this parm was passed. */
4683 DECL_INCOMING_RTL (parm) = entry_parm;
4685 /* If there is actually space on the stack for this parm,
4686 count it in stack_args_size; otherwise set stack_parm to 0
4687 to indicate there is no preallocated stack slot for the parm. */
4689 if (entry_parm == stack_parm
4690 || (GET_CODE (entry_parm) == PARALLEL
4691 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4692 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4693 /* On some machines, even if a parm value arrives in a register
4694 there is still an (uninitialized) stack slot allocated for it.
4696 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4697 whether this parameter already has a stack slot allocated,
4698 because an arg block exists only if current_function_args_size
4699 is larger than some threshold, and we haven't calculated that
4700 yet. So, for now, we just assume that stack slots never exist
4701 in this case. */
4702 || REG_PARM_STACK_SPACE (fndecl) > 0
4703 #endif
4706 stack_args_size.constant += locate.size.constant;
4707 /* locate.size doesn't include the part in regs. */
4708 if (partial)
4709 stack_args_size.constant += current_function_pretend_args_size;
4710 if (locate.size.var)
4711 ADD_PARM_SIZE (stack_args_size, locate.size.var);
4713 else
4714 /* No stack slot was pushed for this parm. */
4715 stack_parm = 0;
4717 /* Update info on where next arg arrives in registers. */
4719 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4720 passed_type, named_arg);
4722 /* If we can't trust the parm stack slot to be aligned enough
4723 for its ultimate type, don't use that slot after entry.
4724 We'll make another stack slot, if we need one. */
4726 unsigned int thisparm_boundary
4727 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4729 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4730 stack_parm = 0;
4733 /* If parm was passed in memory, and we need to convert it on entry,
4734 don't store it back in that same slot. */
4735 if (entry_parm == stack_parm
4736 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4737 stack_parm = 0;
4739 /* When an argument is passed in multiple locations, we can't
4740 make use of this information, but we can save some copying if
4741 the whole argument is passed in a single register. */
4742 if (GET_CODE (entry_parm) == PARALLEL
4743 && nominal_mode != BLKmode && passed_mode != BLKmode)
4745 int i, len = XVECLEN (entry_parm, 0);
4747 for (i = 0; i < len; i++)
4748 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4749 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4750 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4751 == passed_mode)
4752 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4754 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4755 DECL_INCOMING_RTL (parm) = entry_parm;
4756 break;
4760 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4761 in the mode in which it arrives.
4762 STACK_PARM is an RTX for a stack slot where the parameter can live
4763 during the function (in case we want to put it there).
4764 STACK_PARM is 0 if no stack slot was pushed for it.
4766 Now output code if necessary to convert ENTRY_PARM to
4767 the type in which this function declares it,
4768 and store that result in an appropriate place,
4769 which may be a pseudo reg, may be STACK_PARM,
4770 or may be a local stack slot if STACK_PARM is 0.
4772 Set DECL_RTL to that place. */
4774 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4776 /* If a BLKmode arrives in registers, copy it to a stack slot.
4777 Handle calls that pass values in multiple non-contiguous
4778 locations. The Irix 6 ABI has examples of this. */
4779 if (GET_CODE (entry_parm) == REG
4780 || GET_CODE (entry_parm) == PARALLEL)
4782 int size = int_size_in_bytes (TREE_TYPE (parm));
4783 int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
4784 rtx mem;
4786 /* Note that we will be storing an integral number of words.
4787 So we have to be careful to ensure that we allocate an
4788 integral number of words. We do this below in the
4789 assign_stack_local if space was not allocated in the argument
4790 list. If it was, this will not work if PARM_BOUNDARY is not
4791 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4792 if it becomes a problem. */
4794 if (stack_parm == 0)
4796 stack_parm
4797 = assign_stack_local (GET_MODE (entry_parm),
4798 size_stored, 0);
4799 set_mem_attributes (stack_parm, parm, 1);
4802 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4803 abort ();
4805 mem = validize_mem (stack_parm);
4807 /* Handle calls that pass values in multiple non-contiguous
4808 locations. The Irix 6 ABI has examples of this. */
4809 if (GET_CODE (entry_parm) == PARALLEL)
4810 emit_group_store (mem, entry_parm, size);
4812 else if (size == 0)
4815 /* If SIZE is that of a mode no bigger than a word, just use
4816 that mode's store operation. */
4817 else if (size <= UNITS_PER_WORD)
4819 enum machine_mode mode
4820 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
4822 if (mode != BLKmode)
4824 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
4825 emit_move_insn (change_address (mem, mode, 0), reg);
4828 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
4829 machine must be aligned to the left before storing
4830 to memory. Note that the previous test doesn't
4831 handle all cases (e.g. SIZE == 3). */
4832 else if (size != UNITS_PER_WORD
4833 && BYTES_BIG_ENDIAN)
4835 rtx tem, x;
4836 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4837 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
4839 x = expand_binop (word_mode, ashl_optab, reg,
4840 GEN_INT (by), 0, 1, OPTAB_WIDEN);
4841 tem = change_address (mem, word_mode, 0);
4842 emit_move_insn (tem, x);
4844 else
4845 move_block_from_reg (REGNO (entry_parm), mem,
4846 size_stored / UNITS_PER_WORD);
4848 else
4849 move_block_from_reg (REGNO (entry_parm), mem,
4850 size_stored / UNITS_PER_WORD);
4852 SET_DECL_RTL (parm, stack_parm);
4854 else if (! ((! optimize
4855 && ! DECL_REGISTER (parm))
4856 || TREE_SIDE_EFFECTS (parm)
4857 /* If -ffloat-store specified, don't put explicit
4858 float variables into registers. */
4859 || (flag_float_store
4860 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4861 /* Always assign pseudo to structure return or item passed
4862 by invisible reference. */
4863 || passed_pointer || parm == function_result_decl)
4865 /* Store the parm in a pseudoregister during the function, but we
4866 may need to do it in a wider mode. */
4868 rtx parmreg;
4869 unsigned int regno, regnoi = 0, regnor = 0;
4871 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4873 promoted_nominal_mode
4874 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4876 parmreg = gen_reg_rtx (promoted_nominal_mode);
4877 mark_user_reg (parmreg);
4879 /* If this was an item that we received a pointer to, set DECL_RTL
4880 appropriately. */
4881 if (passed_pointer)
4883 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4884 parmreg);
4885 set_mem_attributes (x, parm, 1);
4886 SET_DECL_RTL (parm, x);
4888 else
4890 SET_DECL_RTL (parm, parmreg);
4891 maybe_set_unchanging (DECL_RTL (parm), parm);
4894 /* Copy the value into the register. */
4895 if (nominal_mode != passed_mode
4896 || promoted_nominal_mode != promoted_mode)
4898 int save_tree_used;
4899 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4900 mode, by the caller. We now have to convert it to
4901 NOMINAL_MODE, if different. However, PARMREG may be in
4902 a different mode than NOMINAL_MODE if it is being stored
4903 promoted.
4905 If ENTRY_PARM is a hard register, it might be in a register
4906 not valid for operating in its mode (e.g., an odd-numbered
4907 register for a DFmode). In that case, moves are the only
4908 thing valid, so we can't do a convert from there. This
4909         occurs when the calling sequence allows such misaligned
4910 usages.
4912 In addition, the conversion may involve a call, which could
4913 clobber parameters which haven't been copied to pseudo
4914 registers yet. Therefore, we must first copy the parm to
4915 a pseudo reg here, and save the conversion until after all
4916 parameters have been moved. */
4918 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4920 emit_move_insn (tempreg, validize_mem (entry_parm));
4922 push_to_sequence (conversion_insns);
4923 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4925 if (GET_CODE (tempreg) == SUBREG
4926 && GET_MODE (tempreg) == nominal_mode
4927 && GET_CODE (SUBREG_REG (tempreg)) == REG
4928 && nominal_mode == passed_mode
4929 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4930 && GET_MODE_SIZE (GET_MODE (tempreg))
4931 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4933 /* The argument is already sign/zero extended, so note it
4934 into the subreg. */
4935 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4936 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4939 /* TREE_USED gets set erroneously during expand_assignment. */
4940 save_tree_used = TREE_USED (parm);
4941 expand_assignment (parm,
4942 make_tree (nominal_type, tempreg), 0, 0);
4943 TREE_USED (parm) = save_tree_used;
4944 conversion_insns = get_insns ();
4945 did_conversion = 1;
4946 end_sequence ();
4948 else
4949 emit_move_insn (parmreg, validize_mem (entry_parm));
4951 /* If we were passed a pointer but the actual value
4952 can safely live in a register, put it in one. */
4953 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4954 /* If by-reference argument was promoted, demote it. */
4955 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4956 || ! ((! optimize
4957 && ! DECL_REGISTER (parm))
4958 || TREE_SIDE_EFFECTS (parm)
4959 /* If -ffloat-store specified, don't put explicit
4960 float variables into registers. */
4961 || (flag_float_store
4962 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4964 /* We can't use nominal_mode, because it will have been set to
4965 Pmode above. We must use the actual mode of the parm. */
4966 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4967 mark_user_reg (parmreg);
4968 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4970 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4971 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4972 push_to_sequence (conversion_insns);
4973 emit_move_insn (tempreg, DECL_RTL (parm));
4974 SET_DECL_RTL (parm,
4975 convert_to_mode (GET_MODE (parmreg),
4976 tempreg,
4977 unsigned_p));
4978 emit_move_insn (parmreg, DECL_RTL (parm));
4979 conversion_insns = get_insns();
4980 did_conversion = 1;
4981 end_sequence ();
4983 else
4984 emit_move_insn (parmreg, DECL_RTL (parm));
4985 SET_DECL_RTL (parm, parmreg);
4986 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4987 now the parm. */
4988 stack_parm = 0;
4990 #ifdef FUNCTION_ARG_CALLEE_COPIES
4991 /* If we are passed an arg by reference and it is our responsibility
4992 to make a copy, do it now.
4993         PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4994 original argument, so we must recreate them in the call to
4995 FUNCTION_ARG_CALLEE_COPIES. */
4996 /* ??? Later add code to handle the case that if the argument isn't
4997 modified, don't do the copy. */
4999 else if (passed_pointer
5000 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
5001 TYPE_MODE (DECL_ARG_TYPE (parm)),
5002 DECL_ARG_TYPE (parm),
5003 named_arg)
5004 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
5006 rtx copy;
5007 tree type = DECL_ARG_TYPE (parm);
5009 /* This sequence may involve a library call perhaps clobbering
5010 registers that haven't been copied to pseudos yet. */
5012 push_to_sequence (conversion_insns);
5014 if (!COMPLETE_TYPE_P (type)
5015 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5016 /* This is a variable sized object. */
5017 copy = gen_rtx_MEM (BLKmode,
5018 allocate_dynamic_stack_space
5019 (expr_size (parm), NULL_RTX,
5020 TYPE_ALIGN (type)));
5021 else
5022 copy = assign_stack_temp (TYPE_MODE (type),
5023 int_size_in_bytes (type), 1);
5024 set_mem_attributes (copy, parm, 1);
5026 store_expr (parm, copy, 0);
5027 emit_move_insn (parmreg, XEXP (copy, 0));
5028 conversion_insns = get_insns ();
5029 did_conversion = 1;
5030 end_sequence ();
5032 #endif /* FUNCTION_ARG_CALLEE_COPIES */
5034 /* In any case, record the parm's desired stack location
5035 in case we later discover it must live in the stack.
5037 If it is a COMPLEX value, store the stack location for both
5038 halves. */
5040 if (GET_CODE (parmreg) == CONCAT)
5041 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
5042 else
5043 regno = REGNO (parmreg);
5045 if (regno >= max_parm_reg)
5047 rtx *new;
5048 int old_max_parm_reg = max_parm_reg;
5050 /* It's slow to expand this one register at a time,
5051 but it's also rare and we need max_parm_reg to be
5052 precisely correct. */
5053 max_parm_reg = regno + 1;
5054 new = (rtx *) ggc_realloc (parm_reg_stack_loc,
5055 max_parm_reg * sizeof (rtx));
5056 memset ((char *) (new + old_max_parm_reg), 0,
5057 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
5058 parm_reg_stack_loc = new;
5061 if (GET_CODE (parmreg) == CONCAT)
5063 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
5065 regnor = REGNO (gen_realpart (submode, parmreg));
5066 regnoi = REGNO (gen_imagpart (submode, parmreg));
5068 if (stack_parm != 0)
5070 parm_reg_stack_loc[regnor]
5071 = gen_realpart (submode, stack_parm);
5072 parm_reg_stack_loc[regnoi]
5073 = gen_imagpart (submode, stack_parm);
5075 else
5077 parm_reg_stack_loc[regnor] = 0;
5078 parm_reg_stack_loc[regnoi] = 0;
5081 else
5082 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
5084 /* Mark the register as eliminable if we did no conversion
5085 and it was copied from memory at a fixed offset,
5086 and the arg pointer was not copied to a pseudo-reg.
5087 If the arg pointer is a pseudo reg or the offset formed
5088 an invalid address, such memory-equivalences
5089 as we make here would screw up life analysis for it. */
5090 if (nominal_mode == passed_mode
5091 && ! did_conversion
5092 && stack_parm != 0
5093 && GET_CODE (stack_parm) == MEM
5094 && locate.offset.var == 0
5095 && reg_mentioned_p (virtual_incoming_args_rtx,
5096 XEXP (stack_parm, 0)))
5098 rtx linsn = get_last_insn ();
5099 rtx sinsn, set;
5101 /* Mark complex types separately. */
5102 if (GET_CODE (parmreg) == CONCAT)
5103 /* Scan backwards for the set of the real and
5104 imaginary parts. */
5105 for (sinsn = linsn; sinsn != 0;
5106 sinsn = prev_nonnote_insn (sinsn))
5108 set = single_set (sinsn);
5109 if (set != 0
5110 && SET_DEST (set) == regno_reg_rtx [regnoi])
5111 REG_NOTES (sinsn)
5112 = gen_rtx_EXPR_LIST (REG_EQUIV,
5113 parm_reg_stack_loc[regnoi],
5114 REG_NOTES (sinsn));
5115 else if (set != 0
5116 && SET_DEST (set) == regno_reg_rtx [regnor])
5117 REG_NOTES (sinsn)
5118 = gen_rtx_EXPR_LIST (REG_EQUIV,
5119 parm_reg_stack_loc[regnor],
5120 REG_NOTES (sinsn));
5122 else if ((set = single_set (linsn)) != 0
5123 && SET_DEST (set) == parmreg)
5124 REG_NOTES (linsn)
5125 = gen_rtx_EXPR_LIST (REG_EQUIV,
5126 stack_parm, REG_NOTES (linsn));
5129 /* For pointer data type, suggest pointer register. */
5130 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5131 mark_reg_pointer (parmreg,
5132 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5134 /* If something wants our address, try to use ADDRESSOF. */
5135 if (TREE_ADDRESSABLE (parm))
5137 /* If we end up putting something into the stack,
5138 fixup_var_refs_insns will need to make a pass over
5139 all the instructions. It looks through the pending
5140 sequences -- but it can't see the ones in the
5141 CONVERSION_INSNS, if they're not on the sequence
5142 stack. So, we go back to that sequence, just so that
5143 the fixups will happen. */
5144 push_to_sequence (conversion_insns);
5145 put_var_into_stack (parm, /*rescan=*/true);
5146 conversion_insns = get_insns ();
5147 end_sequence ();
5150 else
5152 /* Value must be stored in the stack slot STACK_PARM
5153 during function execution. */
5155 if (promoted_mode != nominal_mode)
5157 /* Conversion is required. */
5158 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5160 emit_move_insn (tempreg, validize_mem (entry_parm));
5162 push_to_sequence (conversion_insns);
5163 entry_parm = convert_to_mode (nominal_mode, tempreg,
5164 TREE_UNSIGNED (TREE_TYPE (parm)));
5165 if (stack_parm)
5166 /* ??? This may need a big-endian conversion on sparc64. */
5167 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5169 conversion_insns = get_insns ();
5170 did_conversion = 1;
5171 end_sequence ();
5174 if (entry_parm != stack_parm)
5176 if (stack_parm == 0)
5178 stack_parm
5179 = assign_stack_local (GET_MODE (entry_parm),
5180 GET_MODE_SIZE (GET_MODE (entry_parm)),
5182 set_mem_attributes (stack_parm, parm, 1);
5185 if (promoted_mode != nominal_mode)
5187 push_to_sequence (conversion_insns);
5188 emit_move_insn (validize_mem (stack_parm),
5189 validize_mem (entry_parm));
5190 conversion_insns = get_insns ();
5191 end_sequence ();
5193 else
5194 emit_move_insn (validize_mem (stack_parm),
5195 validize_mem (entry_parm));
5198 SET_DECL_RTL (parm, stack_parm);
5202 if (SPLIT_COMPLEX_ARGS)
5204 parm = orig_fnargs;
5206 for (; parm; parm = TREE_CHAIN (parm))
5208 tree type = TREE_TYPE (parm);
5210 if (TREE_CODE (type) == COMPLEX_TYPE)
5212 SET_DECL_RTL (parm,
5213 gen_rtx_CONCAT (DECL_MODE (parm),
5214 DECL_RTL (fnargs),
5215 DECL_RTL (TREE_CHAIN (fnargs))));
5216 DECL_INCOMING_RTL (parm)
5217 = gen_rtx_CONCAT (DECL_MODE (parm),
5218 DECL_INCOMING_RTL (fnargs),
5219 DECL_INCOMING_RTL (TREE_CHAIN (fnargs)));
5220 fnargs = TREE_CHAIN (fnargs);
5222 else
5224 SET_DECL_RTL (parm, DECL_RTL (fnargs));
5225 DECL_INCOMING_RTL (parm) = DECL_INCOMING_RTL (fnargs);
5227 fnargs = TREE_CHAIN (fnargs);
5231 /* Output all parameter conversion instructions (possibly including calls)
5232 now that all parameters have been copied out of hard registers. */
5233 emit_insn (conversion_insns);
5235 /* If we are receiving a struct value address as the first argument, set up
5236 the RTL for the function result. As this might require code to convert
5237 the transmitted address to Pmode, we do this here to ensure that possible
5238 preliminary conversions of the address have been emitted already. */
5239 if (function_result_decl)
5241 tree result = DECL_RESULT (fndecl);
5242 rtx addr = DECL_RTL (function_result_decl);
5243 rtx x;
5245 #ifdef POINTERS_EXTEND_UNSIGNED
5246 if (GET_MODE (addr) != Pmode)
5247 addr = convert_memory_address (Pmode, addr);
5248 #endif
5250 x = gen_rtx_MEM (DECL_MODE (result), addr);
5251 set_mem_attributes (x, result, 1);
5252 SET_DECL_RTL (result, x);
5255 last_parm_insn = get_last_insn ();
5257 current_function_args_size = stack_args_size.constant;
5259 /* Adjust function incoming argument size for alignment and
5260 minimum length. */
5262 #ifdef REG_PARM_STACK_SPACE
5263 #ifndef MAYBE_REG_PARM_STACK_SPACE
5264 current_function_args_size = MAX (current_function_args_size,
5265 REG_PARM_STACK_SPACE (fndecl));
5266 #endif
5267 #endif
5269 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5271 current_function_args_size
5272 = ((current_function_args_size + STACK_BYTES - 1)
5273 / STACK_BYTES) * STACK_BYTES;
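/* E.g., with STACK_BOUNDARY == 64, STACK_BYTES is 8, so an incoming
   args size of 20 bytes is rounded up here to 24.  */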
5275 #ifdef ARGS_GROW_DOWNWARD
5276 current_function_arg_offset_rtx
5277 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5278 : expand_expr (size_diffop (stack_args_size.var,
5279 size_int (-stack_args_size.constant)),
5280 NULL_RTX, VOIDmode, 0));
5281 #else
5282 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5283 #endif
5285 /* See how many bytes, if any, of its args a function should try to pop
5286 on return. */
5288 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5289 current_function_args_size);
5291    /* For a stdarg.h function, save info about
5292 regs and stack space used by the named args. */
5294 current_function_args_info = args_so_far;
5296 /* Set the rtx used for the function return value. Put this in its
5297 own variable so any optimizers that need this information don't have
5298 to include tree.h. Do this here so it gets done when an inlined
5299 function gets output. */
5301 current_function_return_rtx
5302 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5303 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5305 /* If scalar return value was computed in a pseudo-reg, or was a named
5306 return value that got dumped to the stack, copy that to the hard
5307 return register. */
5308 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5310 tree decl_result = DECL_RESULT (fndecl);
5311 rtx decl_rtl = DECL_RTL (decl_result);
5313 if (REG_P (decl_rtl)
5314 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5315 : DECL_REGISTER (decl_result))
5317 rtx real_decl_rtl;
5319 #ifdef FUNCTION_OUTGOING_VALUE
5320 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5321 fndecl);
5322 #else
5323 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5324 fndecl);
5325 #endif
5326 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5327 /* The delay slot scheduler assumes that current_function_return_rtx
5328 holds the hard register containing the return value, not a
5329 temporary pseudo. */
5330 current_function_return_rtx = real_decl_rtl;
5335 static tree
5336 split_complex_args (tree args)
5338 tree p;
5340 args = copy_list (args);
5342 for (p = args; p; p = TREE_CHAIN (p))
5344 tree complex_type = TREE_TYPE (p);
5346 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
5348 tree decl;
5349 tree subtype = TREE_TYPE (complex_type);
5351 /* Rewrite the PARM_DECL's type with its component. */
5352 TREE_TYPE (p) = subtype;
5353 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
5355 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
5356 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
5357 TREE_CHAIN (decl) = TREE_CHAIN (p);
5358 TREE_CHAIN (p) = decl;
5362 return args;
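/* For example, a single parm P of type `_Complex double' comes back
   from this function as two chained PARM_DECLs of type `double': P
   itself, rewritten in place for the real part, and a new decl for
   the imaginary part.  assign_parms later recombines the pair into a
   CONCAT for the original decl.  */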
5365 /* Indicate whether REGNO is an incoming argument to the current function
5366 that was promoted to a wider mode. If so, return the RTX for the
5367 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5368 that REGNO is promoted from and whether the promotion was signed or
5369 unsigned. */
5371 #ifdef PROMOTE_FUNCTION_ARGS
5374 promoted_input_arg (regno, pmode, punsignedp)
5375 unsigned int regno;
5376 enum machine_mode *pmode;
5377 int *punsignedp;
5379 tree arg;
5381 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5382 arg = TREE_CHAIN (arg))
5383 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5384 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5385 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5387 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5388 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5390 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5391 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5392 && mode != DECL_MODE (arg))
5394 *pmode = DECL_MODE (arg);
5395 *punsignedp = unsignedp;
5396 return DECL_INCOMING_RTL (arg);
5400 return 0;
5403 #endif
5405 /* Compute the size and offset from the start of the stacked arguments for a
5406 parm passed in mode PASSED_MODE and with type TYPE.
5408 INITIAL_OFFSET_PTR points to the current offset into the stacked
5409 arguments.
5411 The starting offset and size for this parm are returned in
5412 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
5413    nonzero, the offset is that of the stack slot, which is returned in
5414 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
5415 padding required from the initial offset ptr to the stack slot.
5417 IN_REGS is nonzero if the argument will be passed in registers. It will
5418 never be set if REG_PARM_STACK_SPACE is not defined.
5420 FNDECL is the function in which the argument was defined.
5422 There are two types of rounding that are done. The first, controlled by
5423 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5424 list to be aligned to the specific boundary (in bits). This rounding
5425 affects the initial and starting offsets, but not the argument size.
5427 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5428 optionally rounds the size of the parm to PARM_BOUNDARY. The
5429 initial offset is not affected by this rounding, while the size always
5430 is and the starting offset may be. */
5432 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
5433 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
5434 callers pass in the total size of args so far as
5435 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
5437 void
5438 locate_and_pad_parm (passed_mode, type, in_regs, partial, fndecl,
5439 initial_offset_ptr, locate)
5440 enum machine_mode passed_mode;
5441 tree type;
5442 int in_regs;
5443 int partial;
5444 tree fndecl ATTRIBUTE_UNUSED;
5445 struct args_size *initial_offset_ptr;
5446 struct locate_and_pad_arg_data *locate;
5448 tree sizetree;
5449 enum direction where_pad;
5450 int boundary;
5451 int reg_parm_stack_space = 0;
5452 int part_size_in_regs;
5454 #ifdef REG_PARM_STACK_SPACE
5455 #ifdef MAYBE_REG_PARM_STACK_SPACE
5456 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5457 #else
5458 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5459 #endif
5461 /* If we have found a stack parm before we reach the end of the
5462 area reserved for registers, skip that area. */
5463 if (! in_regs)
5465 if (reg_parm_stack_space > 0)
5467 if (initial_offset_ptr->var)
5469 initial_offset_ptr->var
5470 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5471 ssize_int (reg_parm_stack_space));
5472 initial_offset_ptr->constant = 0;
5474 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5475 initial_offset_ptr->constant = reg_parm_stack_space;
5478 #endif /* REG_PARM_STACK_SPACE */
5480 part_size_in_regs = 0;
5481 if (reg_parm_stack_space == 0)
5482 part_size_in_regs = ((partial * UNITS_PER_WORD)
5483 / (PARM_BOUNDARY / BITS_PER_UNIT)
5484 * (PARM_BOUNDARY / BITS_PER_UNIT));
5486 sizetree
5487 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5488 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5489 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5491 #ifdef ARGS_GROW_DOWNWARD
5492 locate->slot_offset.constant = -initial_offset_ptr->constant;
5493 if (initial_offset_ptr->var)
5494 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
5495 initial_offset_ptr->var);
5498 tree s2 = sizetree;
5499 if (where_pad != none
5500 && (!host_integerp (sizetree, 1)
5501 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5502 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
5503 SUB_PARM_SIZE (locate->slot_offset, s2);
5506 locate->slot_offset.constant += part_size_in_regs;
5508 if (!in_regs
5509 #ifdef REG_PARM_STACK_SPACE
5510 || REG_PARM_STACK_SPACE (fndecl) > 0
5511 #endif
5512 )
5513 pad_to_arg_alignment (&locate->slot_offset, boundary,
5514 &locate->alignment_pad);
5516 locate->size.constant = (-initial_offset_ptr->constant
5517 - locate->slot_offset.constant);
5518 if (initial_offset_ptr->var)
5519 locate->size.var = size_binop (MINUS_EXPR,
5520 size_binop (MINUS_EXPR,
5521 ssize_int (0),
5522 initial_offset_ptr->var),
5523 locate->slot_offset.var);
5525 /* Pad_below needs the pre-rounded size to know how much to pad
5526 below. */
5527 locate->offset = locate->slot_offset;
5528 if (where_pad == downward)
5529 pad_below (&locate->offset, passed_mode, sizetree);
5531 #else /* !ARGS_GROW_DOWNWARD */
5532 if (!in_regs
5533 #ifdef REG_PARM_STACK_SPACE
5534 || REG_PARM_STACK_SPACE (fndecl) > 0
5535 #endif
5536 )
5537 pad_to_arg_alignment (initial_offset_ptr, boundary,
5538 &locate->alignment_pad);
5539 locate->slot_offset = *initial_offset_ptr;
5541 #ifdef PUSH_ROUNDING
5542 if (passed_mode != BLKmode)
5543 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5544 #endif
5546 /* Pad_below needs the pre-rounded size to know how much to pad below
5547 so this must be done before rounding up. */
5548 locate->offset = locate->slot_offset;
5549 if (where_pad == downward)
5550 pad_below (&locate->offset, passed_mode, sizetree);
5552 if (where_pad != none
5553 && (!host_integerp (sizetree, 1)
5554 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5555 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5557 ADD_PARM_SIZE (locate->size, sizetree);
5559 locate->size.constant -= part_size_in_regs;
5560 #endif /* ARGS_GROW_DOWNWARD */
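/* Illustrative sketch, not part of this file: the two roundings described
   in the comment above locate_and_pad_parm, on plain integers.  Assumes a
   FUNCTION_ARG_BOUNDARY of 64 bits and a PARM_BOUNDARY of 32 bits, both
   powers of two, so each rounding can be a mask operation just as in the
   CEIL_ROUND macro used elsewhere in this file.  */
#include <assert.h>

#define DEMO_CEIL_ROUND(v, a)  (((v) + (a) - 1) & ~((a) - 1))

int
locate_rounding_demo (void)
{
  int offset = 12;                            /* bytes of args so far */
  int size = 3;                               /* raw parm size in bytes */
  /* First rounding: the slot offset moves up to the argument boundary;
     the size is untouched.  */
  int slot_offset = DEMO_CEIL_ROUND (offset, 64 / 8);   /* -> 16 */
  /* Second rounding: the size is padded to PARM_BOUNDARY; the slot
     offset already computed is untouched.  */
  int padded_size = DEMO_CEIL_ROUND (size, 32 / 8);     /* -> 4 */
  assert (slot_offset == 16 && padded_size == 4);
  return 0;
}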
5563 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5564 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5566 static void
5567 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5568 struct args_size *offset_ptr;
5569 int boundary;
5570 struct args_size *alignment_pad;
5572 tree save_var = NULL_TREE;
5573 HOST_WIDE_INT save_constant = 0;
5575 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5577 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5579 save_var = offset_ptr->var;
5580 save_constant = offset_ptr->constant;
5583 alignment_pad->var = NULL_TREE;
5584 alignment_pad->constant = 0;
5586 if (boundary > BITS_PER_UNIT)
5588 if (offset_ptr->var)
5590 offset_ptr->var =
5591 #ifdef ARGS_GROW_DOWNWARD
5592 round_down
5593 #else
5594 round_up
5595 #endif
5596 (ARGS_SIZE_TREE (*offset_ptr),
5597 boundary / BITS_PER_UNIT);
5598 /* ARGS_SIZE_TREE includes the constant term. */
5599 offset_ptr->constant = 0;
5600 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5601 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5602 save_var);
5604 else
5606 offset_ptr->constant =
5607 #ifdef ARGS_GROW_DOWNWARD
5608 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5609 #else
5610 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5611 #endif
5612 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5613 alignment_pad->constant = offset_ptr->constant - save_constant;
5618 static void
5619 pad_below (offset_ptr, passed_mode, sizetree)
5620 struct args_size *offset_ptr;
5621 enum machine_mode passed_mode;
5622 tree sizetree;
5624 if (passed_mode != BLKmode)
5626 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5627 offset_ptr->constant
5628 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5629 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5630 - GET_MODE_SIZE (passed_mode));
5632 else
5634 if (TREE_CODE (sizetree) != INTEGER_CST
5635 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5637 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5638 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5639 /* Add it in. */
5640 ADD_PARM_SIZE (*offset_ptr, s2);
5641 SUB_PARM_SIZE (*offset_ptr, sizetree);
5646 /* Walk the tree of blocks describing the binding levels within a function
5647 and warn about uninitialized variables.
5648 This is done after calling flow_analysis and before global_alloc
5649 clobbers the pseudo-regs to hard regs. */
5651 void
5652 uninitialized_vars_warning (block)
5653 tree block;
5655 tree decl, sub;
5656 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5658 if (warn_uninitialized
5659 && TREE_CODE (decl) == VAR_DECL
5660 /* These warnings are unreliable for aggregates
5661 because assigning the fields one by one can fail to convince
5662 flow.c that the entire aggregate was initialized.
5663 Unions are troublesome because members may be shorter. */
5664 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5665 && DECL_RTL (decl) != 0
5666 && GET_CODE (DECL_RTL (decl)) == REG
5667 /* Global optimizations can make it difficult to determine if a
5668 particular variable has been initialized. However, a VAR_DECL
5669 with a nonzero DECL_INITIAL had an initializer, so do not
5670 claim it is potentially uninitialized.
5672 We do not care about the actual value in DECL_INITIAL, so we do
5673 not worry that it may be a dangling pointer. */
5674 && DECL_INITIAL (decl) == NULL_TREE
5675 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5676 warning_with_decl (decl,
5677 "`%s' might be used uninitialized in this function");
5678 if (extra_warnings
5679 && TREE_CODE (decl) == VAR_DECL
5680 && DECL_RTL (decl) != 0
5681 && GET_CODE (DECL_RTL (decl)) == REG
5682 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5683 warning_with_decl (decl,
5684 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5686 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5687 uninitialized_vars_warning (sub);
5690 /* Do the appropriate part of uninitialized_vars_warning
5691 but for arguments instead of local variables. */
5693 void
5694 setjmp_args_warning ()
5696 tree decl;
5697 for (decl = DECL_ARGUMENTS (current_function_decl);
5698 decl; decl = TREE_CHAIN (decl))
5699 if (DECL_RTL (decl) != 0
5700 && GET_CODE (DECL_RTL (decl)) == REG
5701 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5702 warning_with_decl (decl,
5703 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5706 /* If this function calls setjmp, put all vars into the stack
5707 unless they were declared `register'. */
5709 void
5710 setjmp_protect (block)
5711 tree block;
5713 tree decl, sub;
5714 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5715 if ((TREE_CODE (decl) == VAR_DECL
5716 || TREE_CODE (decl) == PARM_DECL)
5717 && DECL_RTL (decl) != 0
5718 && (GET_CODE (DECL_RTL (decl)) == REG
5719 || (GET_CODE (DECL_RTL (decl)) == MEM
5720 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5721 /* If this variable came from an inline function, it must be
5722 that its life doesn't overlap the setjmp. If there was a
5723 setjmp in the function, it would already be in memory. We
5724 must exclude such variables because their DECL_RTL might be
5725 set to strange things such as virtual_stack_vars_rtx. */
5726 && ! DECL_FROM_INLINE (decl)
5727 && (
5728 #ifdef NON_SAVING_SETJMP
5729 /* If longjmp doesn't restore the registers,
5730 don't put anything in them. */
5731 NON_SAVING_SETJMP
5732 ||
5733 #endif
5734 ! DECL_REGISTER (decl)))
5735 put_var_into_stack (decl, /*rescan=*/true);
5736 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5737 setjmp_protect (sub);
5740 /* Like the previous function, but for args instead of local variables. */
5742 void
5743 setjmp_protect_args ()
5745 tree decl;
5746 for (decl = DECL_ARGUMENTS (current_function_decl);
5747 decl; decl = TREE_CHAIN (decl))
5748 if ((TREE_CODE (decl) == VAR_DECL
5749 || TREE_CODE (decl) == PARM_DECL)
5750 && DECL_RTL (decl) != 0
5751 && (GET_CODE (DECL_RTL (decl)) == REG
5752 || (GET_CODE (DECL_RTL (decl)) == MEM
5753 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5754 && (
5755 /* If longjmp doesn't restore the registers,
5756 don't put anything in them. */
5757 #ifdef NON_SAVING_SETJMP
5758 NON_SAVING_SETJMP
5759 ||
5760 #endif
5761 ! DECL_REGISTER (decl)))
5762 put_var_into_stack (decl, /*rescan=*/true);
5765 /* Return the context-pointer register corresponding to DECL,
5766 or 0 if it does not need one. */
5768 rtx
5769 lookup_static_chain (decl)
5770 tree decl;
5772 tree context = decl_function_context (decl);
5773 tree link;
5775 if (context == 0
5776 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5777 return 0;
5779 /* We treat inline_function_decl as an alias for the current function
5780 because that is the inline function whose vars, types, etc.
5781 are being merged into the current function.
5782 See expand_inline_function. */
5783 if (context == current_function_decl || context == inline_function_decl)
5784 return virtual_stack_vars_rtx;
5786 for (link = context_display; link; link = TREE_CHAIN (link))
5787 if (TREE_PURPOSE (link) == context)
5788 return RTL_EXPR_RTL (TREE_VALUE (link));
5790 abort ();
5793 /* Convert a stack slot address ADDR for variable VAR
5794 (from a containing function)
5795 into an address valid in this function (using a static chain). */
5797 rtx
5798 fix_lexical_addr (addr, var)
5799 rtx addr;
5800 tree var;
5802 rtx basereg;
5803 HOST_WIDE_INT displacement;
5804 tree context = decl_function_context (var);
5805 struct function *fp;
5806 rtx base = 0;
5808 /* If this is the present function, we need not do anything. */
5809 if (context == current_function_decl || context == inline_function_decl)
5810 return addr;
5812 fp = find_function_data (context);
5814 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5815 addr = XEXP (XEXP (addr, 0), 0);
5817 /* Decode given address as base reg plus displacement. */
5818 if (GET_CODE (addr) == REG)
5819 basereg = addr, displacement = 0;
5820 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5821 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5822 else
5823 abort ();
5825 /* We accept vars reached via the containing function's
5826 incoming arg pointer and via its stack variables pointer. */
5827 if (basereg == fp->internal_arg_pointer)
5829 /* If reached via arg pointer, get the arg pointer value
5830 out of that function's stack frame.
5832 There are two cases: If a separate ap is needed, allocate a
5833 slot in the outer function for it and dereference it that way.
5834 This is correct even if the real ap is actually a pseudo.
5835 Otherwise, just adjust the offset from the frame pointer to
5836 compensate. */
5838 #ifdef NEED_SEPARATE_AP
5839 rtx addr;
5841 addr = get_arg_pointer_save_area (fp);
5842 addr = fix_lexical_addr (XEXP (addr, 0), var);
5843 addr = memory_address (Pmode, addr);
5845 base = gen_rtx_MEM (Pmode, addr);
5846 set_mem_alias_set (base, get_frame_alias_set ());
5847 base = copy_to_reg (base);
5848 #else
5849 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5850 base = lookup_static_chain (var);
5851 #endif
5854 else if (basereg == virtual_stack_vars_rtx)
5856 /* This is the same code as lookup_static_chain, duplicated here to
5857 avoid an extra call to decl_function_context. */
5858 tree link;
5860 for (link = context_display; link; link = TREE_CHAIN (link))
5861 if (TREE_PURPOSE (link) == context)
5863 base = RTL_EXPR_RTL (TREE_VALUE (link));
5864 break;
5868 if (base == 0)
5869 abort ();
5871 /* Use same offset, relative to appropriate static chain or argument
5872 pointer. */
5873 return plus_constant (base, displacement);
5876 /* Return the address of the trampoline for entering nested fn FUNCTION.
5877 If necessary, allocate a trampoline (in the stack frame)
5878 and emit rtl to initialize its contents (at entry to this function). */
5880 rtx
5881 trampoline_address (function)
5882 tree function;
5884 tree link;
5885 tree rtlexp;
5886 rtx tramp;
5887 struct function *fp;
5888 tree fn_context;
5890 /* Find an existing trampoline and return it. */
5891 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5892 if (TREE_PURPOSE (link) == function)
5893 return
5894 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5896 for (fp = outer_function_chain; fp; fp = fp->outer)
5897 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5898 if (TREE_PURPOSE (link) == function)
5900 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5901 function);
5902 return adjust_trampoline_addr (tramp);
5905 /* None exists; we must make one. */
5907 /* Find the `struct function' for the function containing FUNCTION. */
5908 fp = 0;
5909 fn_context = decl_function_context (function);
5910 if (fn_context != current_function_decl
5911 && fn_context != inline_function_decl)
5912 fp = find_function_data (fn_context);
5914 /* Allocate run-time space for this trampoline. */
5915 /* If rounding is needed, allocate extra space
5916 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5917 #define TRAMPOLINE_REAL_SIZE \
5918 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5919 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5920 fp ? fp : cfun);
5921 /* Record the trampoline for reuse and note it for later initialization
5922 by expand_function_end. */
5923 if (fp != 0)
5925 rtlexp = make_node (RTL_EXPR);
5926 RTL_EXPR_RTL (rtlexp) = tramp;
5927 fp->x_trampoline_list = tree_cons (function, rtlexp,
5928 fp->x_trampoline_list);
5930 else
5932 /* Make the RTL_EXPR node temporary, not momentary, so that the
5933 trampoline_list doesn't become garbage. */
5934 rtlexp = make_node (RTL_EXPR);
5936 RTL_EXPR_RTL (rtlexp) = tramp;
5937 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5940 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5941 return adjust_trampoline_addr (tramp);
5944 /* Given a trampoline address,
5945 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5947 static rtx
5948 round_trampoline_addr (tramp)
5949 rtx tramp;
5951 /* Round address up to desired boundary. */
5952 rtx temp = gen_reg_rtx (Pmode);
5953 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5954 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5956 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5957 temp, 0, OPTAB_LIB_WIDEN);
5958 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5959 temp, 0, OPTAB_LIB_WIDEN);
5961 return tramp;
5964 /* Given a trampoline address, round it then apply any
5965 platform-specific adjustments so that the result can be used for a
5966 function call. */
5968 static rtx
5969 adjust_trampoline_addr (tramp)
5970 rtx tramp;
5972 tramp = round_trampoline_addr (tramp);
5973 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5974 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5975 #endif
5976 return tramp;
5979 /* Put all this function's BLOCK nodes including those that are chained
5980 onto the first block into a vector, and return it.
5981 Also store in each NOTE for the beginning or end of a block
5982 the index of that block in the vector.
5983 This works on the chain of top-level blocks of the current function
5984 (its DECL_INITIAL) and on the function's insn chain. */
5986 void
5987 identify_blocks ()
5989 int n_blocks;
5990 tree *block_vector, *last_block_vector;
5991 tree *block_stack;
5992 tree block = DECL_INITIAL (current_function_decl);
5994 if (block == 0)
5995 return;
5997 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5998 depth-first order. */
5999 block_vector = get_block_vector (block, &n_blocks);
6000 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
6002 last_block_vector = identify_blocks_1 (get_insns (),
6003 block_vector + 1,
6004 block_vector + n_blocks,
6005 block_stack);
6007 /* If we didn't use all of the subblocks, we've misplaced block notes. */
6008 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
6009 if (0 && last_block_vector != block_vector + n_blocks)
6010 abort ();
6012 free (block_vector);
6013 free (block_stack);
6016 /* Subroutine of identify_blocks. Do the block substitution on the
6017 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
6019 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
6020 BLOCK_VECTOR is incremented for each block seen. */
6022 static tree *
6023 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
6024 rtx insns;
6025 tree *block_vector;
6026 tree *end_block_vector;
6027 tree *orig_block_stack;
6029 rtx insn;
6030 tree *block_stack = orig_block_stack;
6032 for (insn = insns; insn; insn = NEXT_INSN (insn))
6034 if (GET_CODE (insn) == NOTE)
6036 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
6038 tree b;
6040 /* If there are more block notes than BLOCKs, something
6041 is badly wrong. */
6042 if (block_vector == end_block_vector)
6043 abort ();
6045 b = *block_vector++;
6046 NOTE_BLOCK (insn) = b;
6047 *block_stack++ = b;
6049 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6051 /* If there are more NOTE_INSN_BLOCK_ENDs than
6052 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
6053 if (block_stack == orig_block_stack)
6054 abort ();
6056 NOTE_BLOCK (insn) = *--block_stack;
6059 else if (GET_CODE (insn) == CALL_INSN
6060 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6062 rtx cp = PATTERN (insn);
6064 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
6065 end_block_vector, block_stack);
6066 if (XEXP (cp, 1))
6067 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
6068 end_block_vector, block_stack);
6069 if (XEXP (cp, 2))
6070 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
6071 end_block_vector, block_stack);
6075 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
6076 something is badly wrong. */
6077 if (block_stack != orig_block_stack)
6078 abort ();
6080 return block_vector;
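/* Illustrative sketch, not part of this file: the stack discipline
   identify_blocks_1 enforces, reduced to characters.  A BLOCK_BEG note
   pushes, a BLOCK_END note pops, and an imbalance in either direction is
   fatal -- the same conditions the aborts above check.  The '(' and ')'
   encoding is hypothetical.  */
static int
block_notes_balanced (const char *notes)
{
  int depth = 0;
  for (; *notes; notes++)
    {
      if (*notes == '(')
	depth++;                    /* NOTE_INSN_BLOCK_BEG */
      else if (*notes == ')')
	{
	  if (depth == 0)
	    return 0;               /* more ENDs than BEGs */
	  depth--;
	}
    }
  return depth == 0;                /* leftover BEGs are also an error */
}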
6083 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
6084 and create duplicate blocks. */
6085 /* ??? Need an option to either create block fragments or to create
6086 abstract origin duplicates of a source block. It really depends
6087 on what optimization has been performed. */
6089 void
6090 reorder_blocks ()
6092 tree block = DECL_INITIAL (current_function_decl);
6093 varray_type block_stack;
6095 if (block == NULL_TREE)
6096 return;
6098 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
6100 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6101 reorder_blocks_0 (block);
6103 /* Prune the old trees away, so that they don't get in the way. */
6104 BLOCK_SUBBLOCKS (block) = NULL_TREE;
6105 BLOCK_CHAIN (block) = NULL_TREE;
6107 /* Recreate the block tree from the note nesting. */
6108 reorder_blocks_1 (get_insns (), block, &block_stack);
6109 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
6111 /* Remove deleted blocks from the block fragment chains. */
6112 reorder_fix_fragments (block);
6115 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
6117 static void
6118 reorder_blocks_0 (block)
6119 tree block;
6121 while (block)
6123 TREE_ASM_WRITTEN (block) = 0;
6124 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
6125 block = BLOCK_CHAIN (block);
6129 static void
6130 reorder_blocks_1 (insns, current_block, p_block_stack)
6131 rtx insns;
6132 tree current_block;
6133 varray_type *p_block_stack;
6135 rtx insn;
6137 for (insn = insns; insn; insn = NEXT_INSN (insn))
6139 if (GET_CODE (insn) == NOTE)
6141 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
6143 tree block = NOTE_BLOCK (insn);
6145 /* If we have seen this block before, that means it now
6146 spans multiple address regions. Create a new fragment. */
6147 if (TREE_ASM_WRITTEN (block))
6149 tree new_block = copy_node (block);
6150 tree origin;
6152 origin = (BLOCK_FRAGMENT_ORIGIN (block)
6153 ? BLOCK_FRAGMENT_ORIGIN (block)
6154 : block);
6155 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
6156 BLOCK_FRAGMENT_CHAIN (new_block)
6157 = BLOCK_FRAGMENT_CHAIN (origin);
6158 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
6160 NOTE_BLOCK (insn) = new_block;
6161 block = new_block;
6164 BLOCK_SUBBLOCKS (block) = 0;
6165 TREE_ASM_WRITTEN (block) = 1;
6166 /* When there's only one block for the entire function,
6167 current_block == block and we mustn't do this; it
6168 would cause infinite recursion. */
6169 if (block != current_block)
6171 BLOCK_SUPERCONTEXT (block) = current_block;
6172 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
6173 BLOCK_SUBBLOCKS (current_block) = block;
6174 current_block = block;
6176 VARRAY_PUSH_TREE (*p_block_stack, block);
6178 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6180 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
6181 VARRAY_POP (*p_block_stack);
6182 BLOCK_SUBBLOCKS (current_block)
6183 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
6184 current_block = BLOCK_SUPERCONTEXT (current_block);
6187 else if (GET_CODE (insn) == CALL_INSN
6188 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6190 rtx cp = PATTERN (insn);
6191 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
6192 if (XEXP (cp, 1))
6193 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
6194 if (XEXP (cp, 2))
6195 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
6200 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
6201 appears in the block tree, select one of the fragments to become
6202 the new origin block. */
6204 static void
6205 reorder_fix_fragments (block)
6206 tree block;
6208 while (block)
6210 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6211 tree new_origin = NULL_TREE;
6213 if (dup_origin)
6215 if (! TREE_ASM_WRITTEN (dup_origin))
6217 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6219 /* Find the first of the remaining fragments. There must
6220 be at least one -- the current block. */
6221 while (! TREE_ASM_WRITTEN (new_origin))
6222 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6223 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6226 else
6227 new_origin = block;
6229 /* Re-root the rest of the fragments to the new origin. In the
6230 case that DUP_ORIGIN was null, that means BLOCK was the origin
6231 of a chain of fragments and we want to remove those fragments
6232 that didn't make it to the output. */
6233 if (new_origin)
6235 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6236 tree chain = *pp;
6238 while (chain)
6240 if (TREE_ASM_WRITTEN (chain))
6242 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6243 *pp = chain;
6244 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6246 chain = BLOCK_FRAGMENT_CHAIN (chain);
6248 *pp = NULL_TREE;
6251 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6252 block = BLOCK_CHAIN (block);
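/* Illustrative sketch, not part of this file: the pointer-to-pointer
   idiom the fragment loop above uses to drop dead links from a singly
   linked chain in place.  'struct frag' and its 'live' flag are
   hypothetical stand-ins for BLOCK and TREE_ASM_WRITTEN.  */
#include <stddef.h>

struct frag { int live; struct frag *next; };

static void
prune_chain (struct frag **pp)
{
  struct frag *f;
  for (f = *pp; f; f = f->next)
    if (f->live)
      {
	*pp = f;                    /* splice the survivor in */
	pp = &f->next;
      }
  *pp = NULL;                       /* terminate after the last survivor */
}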
6256 /* Reverse the order of elements in the chain T of blocks,
6257 and return the new head of the chain (old last element). */
6259 static tree
6260 blocks_nreverse (t)
6261 tree t;
6263 tree prev = 0, decl, next;
6264 for (decl = t; decl; decl = next)
6266 next = BLOCK_CHAIN (decl);
6267 BLOCK_CHAIN (decl) = prev;
6268 prev = decl;
6270 return prev;
6273 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6274 non-NULL, list them all into VECTOR, in a depth-first preorder
6275 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6276 blocks. */
6278 static int
6279 all_blocks (block, vector)
6280 tree block;
6281 tree *vector;
6283 int n_blocks = 0;
6285 while (block)
6287 TREE_ASM_WRITTEN (block) = 0;
6289 /* Record this block. */
6290 if (vector)
6291 vector[n_blocks] = block;
6293 ++n_blocks;
6295 /* Record the subblocks, and their subblocks... */
6296 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6297 vector ? vector + n_blocks : 0);
6298 block = BLOCK_CHAIN (block);
6301 return n_blocks;
6304 /* Return a vector containing all the blocks rooted at BLOCK. The
6305 number of elements in the vector is stored in N_BLOCKS_P. The
6306 vector is dynamically allocated; it is the caller's responsibility
6307 to call `free' on the pointer returned. */
6309 static tree *
6310 get_block_vector (block, n_blocks_p)
6311 tree block;
6312 int *n_blocks_p;
6314 tree *block_vector;
6316 *n_blocks_p = all_blocks (block, NULL);
6317 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6318 all_blocks (block, block_vector);
6320 return block_vector;
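/* Illustrative sketch, not part of this file: the two-pass pattern of
   all_blocks/get_block_vector above -- one preorder walk to count, one to
   fill a vector sized exactly to that count.  'struct blk' is a
   hypothetical stand-in for BLOCK nodes.  */
#include <stdlib.h>

struct blk { struct blk *sub, *chain; };

static int
walk (struct blk *b, struct blk **vec)
{
  int n = 0;
  for (; b; b = b->chain)
    {
      if (vec)
	vec[n] = b;                          /* second pass: record */
      n++;
      n += walk (b->sub, vec ? vec + n : NULL);
    }
  return n;
}

static struct blk **
blk_vector (struct blk *root, int *n_p)
{
  struct blk **vec;
  *n_p = walk (root, NULL);                  /* first pass: count */
  vec = malloc (*n_p * sizeof *vec);
  walk (root, vec);                          /* fill */
  return vec;                                /* caller frees, as above */
}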
6323 static GTY(()) int next_block_index = 2;
6325 /* Set BLOCK_NUMBER for all the blocks in FN. */
6327 void
6328 number_blocks (fn)
6329 tree fn;
6331 int i;
6332 int n_blocks;
6333 tree *block_vector;
6335 /* For SDB and XCOFF debugging output, we start numbering the blocks
6336 from 1 within each function, rather than keeping a running
6337 count. */
6338 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6339 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6340 next_block_index = 1;
6341 #endif
6343 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6345 /* The top-level BLOCK isn't numbered at all. */
6346 for (i = 1; i < n_blocks; ++i)
6347 /* We number the blocks from two. */
6348 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6350 free (block_vector);
6352 return;
6355 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6357 tree
6358 debug_find_var_in_block_tree (var, block)
6359 tree var;
6360 tree block;
6362 tree t;
6364 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6365 if (t == var)
6366 return block;
6368 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6370 tree ret = debug_find_var_in_block_tree (var, t);
6371 if (ret)
6372 return ret;
6375 return NULL_TREE;
6378 /* Allocate a function structure and reset its contents to the defaults. */
6380 static void
6381 prepare_function_start ()
6383 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6385 init_stmt_for_function ();
6386 init_eh_for_function ();
6388 cse_not_expected = ! optimize;
6390 /* Caller save not needed yet. */
6391 caller_save_needed = 0;
6393 /* No stack slots have been made yet. */
6394 stack_slot_list = 0;
6396 current_function_has_nonlocal_label = 0;
6397 current_function_has_nonlocal_goto = 0;
6399 /* There is no stack slot for handling nonlocal gotos. */
6400 nonlocal_goto_handler_slots = 0;
6401 nonlocal_goto_stack_level = 0;
6403 /* No labels have been declared for nonlocal use. */
6404 nonlocal_labels = 0;
6405 nonlocal_goto_handler_labels = 0;
6407 /* No function calls so far in this function. */
6408 function_call_count = 0;
6410 /* No parm regs have been allocated.
6411 (This is important for output_inline_function.) */
6412 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6414 /* Initialize the RTL mechanism. */
6415 init_emit ();
6417 /* Initialize the queue of pending postincrements and postdecrements,
6418 and some other info in expr.c. */
6419 init_expr ();
6421 /* We haven't done register allocation yet. */
6422 reg_renumber = 0;
6424 init_varasm_status (cfun);
6426 /* Clear out data used for inlining. */
6427 cfun->inlinable = 0;
6428 cfun->original_decl_initial = 0;
6429 cfun->original_arg_vector = 0;
6431 cfun->stack_alignment_needed = STACK_BOUNDARY;
6432 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6434 /* Set if a call to setjmp is seen. */
6435 current_function_calls_setjmp = 0;
6437 /* Set if a call to longjmp is seen. */
6438 current_function_calls_longjmp = 0;
6440 current_function_calls_alloca = 0;
6441 current_function_calls_eh_return = 0;
6442 current_function_calls_constant_p = 0;
6443 current_function_contains_functions = 0;
6444 current_function_is_leaf = 0;
6445 current_function_nothrow = 0;
6446 current_function_sp_is_unchanging = 0;
6447 current_function_uses_only_leaf_regs = 0;
6448 current_function_has_computed_jump = 0;
6449 current_function_is_thunk = 0;
6451 current_function_returns_pcc_struct = 0;
6452 current_function_returns_struct = 0;
6453 current_function_epilogue_delay_list = 0;
6454 current_function_uses_const_pool = 0;
6455 current_function_uses_pic_offset_table = 0;
6456 current_function_cannot_inline = 0;
6458 /* We have not yet needed to make a label to jump to for tail-recursion. */
6459 tail_recursion_label = 0;
6461 /* We haven't had a need to make a save area for ap yet. */
6462 arg_pointer_save_area = 0;
6464 /* No stack slots allocated yet. */
6465 frame_offset = 0;
6467 /* No SAVE_EXPRs in this function yet. */
6468 save_expr_regs = 0;
6470 /* No RTL_EXPRs in this function yet. */
6471 rtl_expr_chain = 0;
6473 /* Set up to allocate temporaries. */
6474 init_temp_slots ();
6476 /* Indicate that we need to distinguish between the return value of the
6477 present function and the return value of a function being called. */
6478 rtx_equal_function_value_matters = 1;
6480 /* Indicate that we have not instantiated virtual registers yet. */
6481 virtuals_instantiated = 0;
6483 /* Indicate that we want CONCATs now. */
6484 generating_concat_p = 1;
6486 /* Indicate we have no need of a frame pointer yet. */
6487 frame_pointer_needed = 0;
6489 /* By default assume not stdarg. */
6490 current_function_stdarg = 0;
6492 /* We haven't made any trampolines for this function yet. */
6493 trampoline_list = 0;
6495 init_pending_stack_adjust ();
6496 inhibit_defer_pop = 0;
6498 current_function_outgoing_args_size = 0;
6500 current_function_funcdef_no = funcdef_no++;
6502 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6504 cfun->max_jumptable_ents = 0;
6506 (*lang_hooks.function.init) (cfun);
6507 if (init_machine_status)
6508 cfun->machine = (*init_machine_status) ();
6511 /* Initialize the rtl expansion mechanism so that we can do simple things
6512 like generate sequences. This is used to provide a context during global
6513 initialization of some passes. */
6514 void
6515 init_dummy_function_start ()
6517 prepare_function_start ();
6520 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6521 and initialize static variables for generating RTL for the statements
6522 of the function. */
6524 void
6525 init_function_start (subr)
6526 tree subr;
6528 prepare_function_start ();
6530 current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
6531 cfun->decl = subr;
6533 /* Nonzero if this is a nested function that uses a static chain. */
6535 current_function_needs_context
6536 = (decl_function_context (current_function_decl) != 0
6537 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6539 /* Within function body, compute a type's size as soon as it is laid out. */
6540 immediate_size_expand++;
6542 /* Prevent ever trying to delete the first instruction of a
6543 function. Also tell final how to output a linenum before the
6544 function prologue. Note linenums could be missing, e.g. when
6545 compiling a Java .class file. */
6546 if (DECL_SOURCE_LINE (subr))
6547 emit_line_note (DECL_SOURCE_FILE (subr), DECL_SOURCE_LINE (subr));
6549 /* Make sure first insn is a note even if we don't want linenums.
6550 This makes sure the first insn will never be deleted.
6551 Also, final expects a note to appear there. */
6552 emit_note (NULL, NOTE_INSN_DELETED);
6554 /* Set flags used by final.c. */
6555 if (aggregate_value_p (DECL_RESULT (subr)))
6557 #ifdef PCC_STATIC_STRUCT_RETURN
6558 current_function_returns_pcc_struct = 1;
6559 #endif
6560 current_function_returns_struct = 1;
6563 /* Warn if this value is an aggregate type,
6564 regardless of which calling convention we are using for it. */
6565 if (warn_aggregate_return
6566 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6567 warning ("function returns an aggregate");
6569 current_function_returns_pointer
6570 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6573 /* Make sure all values used by the optimization passes have sane
6574 defaults. */
6575 void
6576 init_function_for_compilation ()
6578 reg_renumber = 0;
6580 /* No prologue/epilogue insns yet. */
6581 VARRAY_GROW (prologue, 0);
6582 VARRAY_GROW (epilogue, 0);
6583 VARRAY_GROW (sibcall_epilogue, 0);
6586 /* Expand a call to __main at the beginning of a possible main function. */
6588 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6589 #undef HAS_INIT_SECTION
6590 #define HAS_INIT_SECTION
6591 #endif
6593 void
6594 expand_main_function ()
6596 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6597 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6599 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6600 rtx tmp, seq;
6602 start_sequence ();
6603 /* Forcibly align the stack. */
6604 #ifdef STACK_GROWS_DOWNWARD
6605 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
6606 stack_pointer_rtx, 1, OPTAB_WIDEN);
6607 #else
6608 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6609 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6610 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6611 stack_pointer_rtx, 1, OPTAB_WIDEN);
6612 #endif
6613 if (tmp != stack_pointer_rtx)
6614 emit_move_insn (stack_pointer_rtx, tmp);
6616 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6617 tmp = force_reg (Pmode, const0_rtx);
6618 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6619 seq = get_insns ();
6620 end_sequence ();
6622 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6623 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6624 break;
6625 if (tmp)
6626 emit_insn_before (seq, tmp);
6627 else
6628 emit_insn (seq);
6630 #endif
6632 #ifndef HAS_INIT_SECTION
6633 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
6634 #endif
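/* Illustrative sketch, not part of this file: the alignment arithmetic
   the sequence above emits, on plain integers.  With a downward-growing
   stack, masking off the low bits rounds the pointer down, allocating the
   slack; otherwise round up first.  ALIGN must be a power of two; note
   that -align == ~(align - 1) in two's complement, matching the
   GEN_INT (-align) mask above.  */
#include <assert.h>
#include <stdint.h>

static uintptr_t
sp_align_down (uintptr_t sp, uintptr_t align)
{
  return sp & ~(align - 1);          /* STACK_GROWS_DOWNWARD case */
}

static uintptr_t
sp_align_up (uintptr_t sp, uintptr_t align)
{
  return (sp + align - 1) & ~(align - 1);
}

int
sp_align_demo (void)
{
  assert (sp_align_down (0x1007, 16) == 0x1000);
  assert (sp_align_up (0x1001, 16) == 0x1010);
  return 0;
}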
6637 /* The PENDING_SIZES represent the sizes of variable-sized types.
6638 Create RTL for the various sizes now (using temporary variables),
6639 so that we can refer to the sizes from the RTL we are generating
6640 for the current function. The PENDING_SIZES are a TREE_LIST. The
6641 TREE_VALUE of each node is a SAVE_EXPR. */
6643 void
6644 expand_pending_sizes (pending_sizes)
6645 tree pending_sizes;
6647 tree tem;
6649 /* Evaluate now the sizes of any types declared among the arguments. */
6650 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6652 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6653 /* Flush the queue in case this parameter declaration has
6654 side-effects. */
6655 emit_queue ();
6659 /* Start the RTL for a new function, and set variables used for
6660 emitting RTL.
6661 SUBR is the FUNCTION_DECL node.
6662 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6663 the function's parameters, which must be run at any return statement. */
6665 void
6666 expand_function_start (subr, parms_have_cleanups)
6667 tree subr;
6668 int parms_have_cleanups;
6670 tree tem;
6671 rtx last_ptr = NULL_RTX;
6673 /* Make sure volatile mem refs aren't considered
6674 valid operands of arithmetic insns. */
6675 init_recog_no_volatile ();
6677 current_function_instrument_entry_exit
6678 = (flag_instrument_function_entry_exit
6679 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6681 current_function_profile
6682 = (profile_flag
6683 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6685 current_function_limit_stack
6686 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6688 /* If the function gets a static chain arg, store it in the stack frame.
6689 Do this first, so it gets the first stack slot offset. */
6690 if (current_function_needs_context)
6692 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6694 /* Delay copying static chain if it is not a register to avoid
6695 conflicts with regs used for parameters. */
6696 if (! SMALL_REGISTER_CLASSES
6697 || GET_CODE (static_chain_incoming_rtx) == REG)
6698 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6701 /* If the parameters of this function need cleaning up, get a label
6702 for the beginning of the code which executes those cleanups. This must
6703 be done before doing anything with return_label. */
6704 if (parms_have_cleanups)
6705 cleanup_label = gen_label_rtx ();
6706 else
6707 cleanup_label = 0;
6709 /* Make the label for return statements to jump to. Do not special
6710 case machines with special return instructions -- they will be
6711 handled later during jump, ifcvt, or epilogue creation. */
6712 return_label = gen_label_rtx ();
6714 /* Initialize rtx used to return the value. */
6715 /* Do this before assign_parms so that we copy the struct value address
6716 before any library calls that assign parms might generate. */
6718 /* Decide whether to return the value in memory or in a register. */
6719 if (aggregate_value_p (DECL_RESULT (subr)))
6721 /* Returning something that won't go in a register. */
6722 rtx value_address = 0;
6724 #ifdef PCC_STATIC_STRUCT_RETURN
6725 if (current_function_returns_pcc_struct)
6727 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6728 value_address = assemble_static_space (size);
6730 else
6731 #endif
6733 /* Expect to be passed the address of a place to store the value.
6734 If it is passed as an argument, assign_parms will take care of
6735 it. */
6736 if (struct_value_incoming_rtx)
6738 value_address = gen_reg_rtx (Pmode);
6739 emit_move_insn (value_address, struct_value_incoming_rtx);
6742 if (value_address)
6744 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6745 set_mem_attributes (x, DECL_RESULT (subr), 1);
6746 SET_DECL_RTL (DECL_RESULT (subr), x);
6749 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6750 /* If return mode is void, this decl rtl should not be used. */
6751 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6752 else
6754 /* Compute the return value into a pseudo reg, which we will copy
6755 into the true return register after the cleanups are done. */
6757 /* In order to figure out what mode to use for the pseudo, we
6758 figure out what the mode of the eventual return register will
6759 actually be, and use that. */
6760 rtx hard_reg
6761 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6762 subr, 1);
6764 /* Structures that are returned in registers are not aggregate_value_p,
6765 so we may see a PARALLEL or a REG. */
6766 if (REG_P (hard_reg))
6767 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6768 else if (GET_CODE (hard_reg) == PARALLEL)
6769 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
6770 else
6771 abort ();
6773 /* Set DECL_REGISTER flag so that expand_function_end will copy the
6774 result to the real return register(s). */
6775 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6778 /* Initialize rtx for parameters and local variables.
6779 In some cases this requires emitting insns. */
6781 assign_parms (subr);
6783 /* Copy the static chain now if it wasn't a register. The delay is to
6784 avoid conflicts with the parameter passing registers. */
6786 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6787 if (GET_CODE (static_chain_incoming_rtx) != REG)
6788 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6790 /* The following was moved from init_function_start.
6791 The move is supposed to make sdb output more accurate. */
6792 /* Indicate the beginning of the function body,
6793 as opposed to parm setup. */
6794 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6796 if (GET_CODE (get_last_insn ()) != NOTE)
6797 emit_note (NULL, NOTE_INSN_DELETED);
6798 parm_birth_insn = get_last_insn ();
6800 context_display = 0;
6801 if (current_function_needs_context)
6803 /* Fetch static chain values for containing functions. */
6804 tem = decl_function_context (current_function_decl);
6805 /* Copy the static chain pointer into a pseudo. If we have
6806 small register classes, copy the value from memory if
6807 static_chain_incoming_rtx is a REG. */
6808 if (tem)
6810 /* If the static chain originally came in a register, put it back
6811 there, then move it out in the next insn. The reason for
6812 this peculiar code is to satisfy function integration. */
6813 if (SMALL_REGISTER_CLASSES
6814 && GET_CODE (static_chain_incoming_rtx) == REG)
6815 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6816 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6819 while (tem)
6821 tree rtlexp = make_node (RTL_EXPR);
6823 RTL_EXPR_RTL (rtlexp) = last_ptr;
6824 context_display = tree_cons (tem, rtlexp, context_display);
6825 tem = decl_function_context (tem);
6826 if (tem == 0)
6827 break;
6828 /* Chain thru stack frames, assuming pointer to next lexical frame
6829 is found at the place we always store it. */
6830 #ifdef FRAME_GROWS_DOWNWARD
6831 last_ptr = plus_constant (last_ptr,
6832 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6833 #endif
6834 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6835 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6836 last_ptr = copy_to_reg (last_ptr);
6838 /* If we are not optimizing, ensure that we know that this
6839 piece of context is live over the entire function. */
6840 if (! optimize)
6841 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6842 save_expr_regs);
6846 if (current_function_instrument_entry_exit)
6848 rtx fun = DECL_RTL (current_function_decl);
6849 if (GET_CODE (fun) == MEM)
6850 fun = XEXP (fun, 0);
6851 else
6852 abort ();
6853 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6854 2, fun, Pmode,
6855 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6857 hard_frame_pointer_rtx),
6858 Pmode);
6861 if (current_function_profile)
6863 #ifdef PROFILE_HOOK
6864 PROFILE_HOOK (current_function_funcdef_no);
6865 #endif
6868 /* The tail-recursion label, if we end up needing one, should go
6869 after the display initializations. Ensure we have a NOTE here
6870 since some things (like trampolines) get placed before this. */
6871 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6873 /* Evaluate now the sizes of any types declared among the arguments. */
6874 expand_pending_sizes (nreverse (get_pending_sizes ()));
6876 /* Make sure there is a line number after the function entry setup code. */
6877 force_next_line_note ();
6880 /* Undo the effects of init_dummy_function_start. */
6881 void
6882 expand_dummy_function_end ()
6884 /* End any sequences that failed to be closed due to syntax errors. */
6885 while (in_sequence_p ())
6886 end_sequence ();
6888 /* Outside function body, can't compute type's actual size
6889 until next function's body starts. */
6891 free_after_parsing (cfun);
6892 free_after_compilation (cfun);
6893 cfun = 0;
6896 /* Call DOIT for each hard register used as a return value from
6897 the current function. */
6899 void
6900 diddle_return_value (doit, arg)
6901 void (*doit) PARAMS ((rtx, void *));
6902 void *arg;
6904 rtx outgoing = current_function_return_rtx;
6906 if (! outgoing)
6907 return;
6909 if (GET_CODE (outgoing) == REG)
6910 (*doit) (outgoing, arg);
6911 else if (GET_CODE (outgoing) == PARALLEL)
6913 int i;
6915 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6917 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6919 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6920 (*doit) (x, arg);
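/* Illustrative sketch, not part of this file: the callback dispatch
   diddle_return_value implements.  One walk over the return-value
   registers serves both the CLOBBER and the USE emitters below; plain
   integer register numbers stand in for rtx REGs here.  */
static int clobbered[2], used[2];

typedef void (*reg_doit) (int regno, void *arg);

static void
walk_ret_regs (const int *regs, int n, reg_doit doit, void *arg)
{
  int i;
  for (i = 0; i < n; i++)
    (*doit) (regs[i], arg);          /* same shape as (*doit) (x, arg) */
}

static void
mark_clobbered (int regno, void *arg)
{
  (void) arg;
  clobbered[regno] = 1;
}

static void
mark_used (int regno, void *arg)
{
  (void) arg;
  used[regno] = 1;
}

int
diddle_demo (void)
{
  static const int regs[] = { 0, 1 };
  walk_ret_regs (regs, 2, mark_clobbered, NULL);
  walk_ret_regs (regs, 2, mark_used, NULL);
  return clobbered[0] + used[1];     /* both passes visited both regs */
}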
6925 static void
6926 do_clobber_return_reg (reg, arg)
6927 rtx reg;
6928 void *arg ATTRIBUTE_UNUSED;
6930 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6933 void
6934 clobber_return_register ()
6936 diddle_return_value (do_clobber_return_reg, NULL);
6938 /* In case we do use pseudo to return value, clobber it too. */
6939 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6941 tree decl_result = DECL_RESULT (current_function_decl);
6942 rtx decl_rtl = DECL_RTL (decl_result);
6943 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6945 do_clobber_return_reg (decl_rtl, NULL);
6950 static void
6951 do_use_return_reg (reg, arg)
6952 rtx reg;
6953 void *arg ATTRIBUTE_UNUSED;
6955 emit_insn (gen_rtx_USE (VOIDmode, reg));
6958 void
6959 use_return_register ()
6961 diddle_return_value (do_use_return_reg, NULL);
6964 static GTY(()) rtx initial_trampoline;
6966 /* Generate RTL for the end of the current function.
6967 FILENAME and LINE are the current position in the source file.
6969 It is up to language-specific callers to do cleanups for parameters--
6970 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6972 void
6973 expand_function_end (filename, line, end_bindings)
6974 const char *filename;
6975 int line;
6976 int end_bindings;
6978 tree link;
6979 rtx clobber_after;
6981 finish_expr_for_function ();
6983 /* If arg_pointer_save_area was referenced only from a nested
6984 function, we will not have initialized it yet. Do that now. */
6985 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6986 get_arg_pointer_save_area (cfun);
6988 #ifdef NON_SAVING_SETJMP
6989 /* Don't put any variables in registers if we call setjmp
6990 on a machine that fails to restore the registers. */
6991 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6993 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6994 setjmp_protect (DECL_INITIAL (current_function_decl));
6996 setjmp_protect_args ();
6998 #endif
7000 /* Initialize any trampolines required by this function. */
7001 for (link = trampoline_list; link; link = TREE_CHAIN (link))
7003 tree function = TREE_PURPOSE (link);
7004 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
7005 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
7006 #ifdef TRAMPOLINE_TEMPLATE
7007 rtx blktramp;
7008 #endif
7009 rtx seq;
7011 #ifdef TRAMPOLINE_TEMPLATE
7012 /* First make sure this compilation has a template for
7013 initializing trampolines. */
7014 if (initial_trampoline == 0)
7016 initial_trampoline
7017 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
7018 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
7020 #endif
7022 /* Generate insns to initialize the trampoline. */
7023 start_sequence ();
7024 tramp = round_trampoline_addr (XEXP (tramp, 0));
7025 #ifdef TRAMPOLINE_TEMPLATE
7026 blktramp = replace_equiv_address (initial_trampoline, tramp);
7027 emit_block_move (blktramp, initial_trampoline,
7028 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
7029 #endif
7030 trampolines_created = 1;
7031 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
7032 seq = get_insns ();
7033 end_sequence ();
7035 /* Put those insns at entry to the containing function (this one). */
7036 emit_insn_before (seq, tail_recursion_reentry);
7039 /* If we are doing stack checking and this function makes calls,
7040 do a stack probe at the start of the function to ensure we have enough
7041 space for another stack frame. */
7042 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
7044 rtx insn, seq;
7046 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7047 if (GET_CODE (insn) == CALL_INSN)
7049 start_sequence ();
7050 probe_stack_range (STACK_CHECK_PROTECT,
7051 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
7052 seq = get_insns ();
7053 end_sequence ();
7054 emit_insn_before (seq, tail_recursion_reentry);
7055 break;
7059 /* Possibly warn about unused parameters. */
7060 if (warn_unused_parameter)
7062 tree decl;
7064 for (decl = DECL_ARGUMENTS (current_function_decl);
7065 decl; decl = TREE_CHAIN (decl))
7066 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
7067 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
7068 warning_with_decl (decl, "unused parameter `%s'");
7071 /* Delete handlers for nonlocal gotos if nothing uses them. */
7072 if (nonlocal_goto_handler_slots != 0
7073 && ! current_function_has_nonlocal_label)
7074 delete_handlers ();
7076 /* End any sequences that failed to be closed due to syntax errors. */
7077 while (in_sequence_p ())
7078 end_sequence ();
7080 /* Outside function body, can't compute type's actual size
7081 until next function's body starts. */
7082 immediate_size_expand--;
7084 clear_pending_stack_adjust ();
7085 do_pending_stack_adjust ();
7087 /* Mark the end of the function body.
7088 If control reaches this insn, the function can drop through
7089 without returning a value. */
7090 emit_note (NULL, NOTE_INSN_FUNCTION_END);
7092 /* Must mark the last line number note in the function, so that the test
7093 coverage code can avoid counting the last line twice. This just tells
7094 the code to ignore the immediately following line note, since there
7095 already exists a copy of this note somewhere above. This line number
7096 note is still needed for debugging though, so we can't delete it. */
7097 if (flag_test_coverage)
7098 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
7100 /* Output a linenumber for the end of the function.
7101 SDB depends on this. */
7102 emit_line_note_force (filename, line);
7104 /* Before the return label (if any), clobber the return
7105 registers so that they are not propagated live to the rest of
7106 the function. This can only happen with functions that drop
7107 through; if there had been a return statement, there would
7108 have either been a return rtx, or a jump to the return label.
7110 We delay actual code generation until after current_function_return_rtx
7111 is computed. */
7112 clobber_after = get_last_insn ();
7114 /* Output the label for the actual return from the function,
7115 if one is expected. This happens either because a function epilogue
7116 is used instead of a return instruction, or because a return was done
7117 with a goto in order to run local cleanups, or because of pcc-style
7118 structure returning. */
7119 if (return_label)
7120 emit_label (return_label);
7122 /* C++ uses this. */
7123 if (end_bindings)
7124 expand_end_bindings (0, 0, 0);
7126 if (current_function_instrument_entry_exit)
7128 rtx fun = DECL_RTL (current_function_decl);
7129 if (GET_CODE (fun) == MEM)
7130 fun = XEXP (fun, 0);
7131 else
7132 abort ();
7133 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
7134 2, fun, Pmode,
7135 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
7137 hard_frame_pointer_rtx),
7138 Pmode);
7141 /* Let except.c know where it should emit the call to unregister
7142 the function context for sjlj exceptions. */
7143 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
7144 sjlj_emit_function_exit_after (get_last_insn ());
7146 /* If we had calls to alloca, and this machine needs
7147 an accurate stack pointer to exit the function,
7148 insert some code to save and restore the stack pointer. */
7149 #ifdef EXIT_IGNORE_STACK
7150 if (! EXIT_IGNORE_STACK)
7151 #endif
7152 if (current_function_calls_alloca)
7154 rtx tem = 0;
7156 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
7157 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
7160 /* If scalar return value was computed in a pseudo-reg, or was a named
7161 return value that got dumped to the stack, copy that to the hard
7162 return register. */
7163 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
7165 tree decl_result = DECL_RESULT (current_function_decl);
7166 rtx decl_rtl = DECL_RTL (decl_result);
7168 if (REG_P (decl_rtl)
7169 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
7170 : DECL_REGISTER (decl_result))
7172 rtx real_decl_rtl = current_function_return_rtx;
7174 /* This should be set in assign_parms. */
7175 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
7176 abort ();
7178 /* If this is a BLKmode structure being returned in registers,
7179 then use the mode computed in expand_return. Note that if
7180 decl_rtl is memory, then its mode may have been changed,
7181 but that current_function_return_rtx has not. */
7182 if (GET_MODE (real_decl_rtl) == BLKmode)
7183 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
7185 /* If a named return value dumped decl_return to memory, then
7186 we may need to re-do the PROMOTE_MODE signed/unsigned
7187 extension. */
7188 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
7190 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
7192 #ifdef PROMOTE_FUNCTION_RETURN
7193 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
7194 &unsignedp, 1);
7195 #endif
7197 convert_move (real_decl_rtl, decl_rtl, unsignedp);
7199 else if (GET_CODE (real_decl_rtl) == PARALLEL)
7201 /* If expand_function_start has created a PARALLEL for decl_rtl,
7202 move the result to the real return registers. Otherwise, do
7203 a group load from decl_rtl for a named return. */
7204 if (GET_CODE (decl_rtl) == PARALLEL)
7205 emit_group_move (real_decl_rtl, decl_rtl);
7206 else
7207 emit_group_load (real_decl_rtl, decl_rtl,
7208 int_size_in_bytes (TREE_TYPE (decl_result)));
7210 else
7211 emit_move_insn (real_decl_rtl, decl_rtl);
7215 /* If returning a structure, arrange to return the address of the value
7216 in a place where debuggers expect to find it.
7218 If returning a structure PCC style,
7219 the caller also depends on this value.
7220 And current_function_returns_pcc_struct is not necessarily set. */
7221 if (current_function_returns_struct
7222 || current_function_returns_pcc_struct)
7224 rtx value_address
7225 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7226 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7227 #ifdef FUNCTION_OUTGOING_VALUE
7228 rtx outgoing
7229 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7230 current_function_decl);
7231 #else
7232 rtx outgoing
7233 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7234 #endif
7236 /* Mark this as a function return value so integrate will delete the
7237 assignment and USE below when inlining this function. */
7238 REG_FUNCTION_VALUE_P (outgoing) = 1;
7240 #ifdef POINTERS_EXTEND_UNSIGNED
7241 /* The address may be ptr_mode and OUTGOING may be Pmode. */
7242 if (GET_MODE (outgoing) != GET_MODE (value_address))
7243 value_address = convert_memory_address (GET_MODE (outgoing),
7244 value_address);
7245 #endif
7247 emit_move_insn (outgoing, value_address);
7249 /* Show return register used to hold result (in this case the address
7250 of the result). */
7251 current_function_return_rtx = outgoing;
7254 /* If this is an implementation of throw, do what's necessary to
7255 communicate between __builtin_eh_return and the epilogue. */
7256 expand_eh_return ();
7258 /* Emit the actual code to clobber return register. */
7260 rtx seq, after;
7262 start_sequence ();
7263 clobber_return_register ();
7264 seq = get_insns ();
7265 end_sequence ();
7267 after = emit_insn_after (seq, clobber_after);
7269 if (clobber_after != after)
7270 cfun->x_clobber_return_insn = after;
7273 /* ??? This should no longer be necessary since stupid is no longer with
7274 us, but there are some parts of the compiler (eg reload_combine, and
7275 sh mach_dep_reorg) that still try and compute their own lifetime info
7276 instead of using the general framework. */
7277 use_return_register ();
7279 /* Fix up any gotos that jumped out to the outermost
7280 binding level of the function.
7281 Must follow emitting RETURN_LABEL. */
7283 /* If you have any cleanups to do at this point,
7284 and they need to create temporary variables,
7285 then you will lose. */
7286 expand_fixups (get_insns ());
7289 rtx
7290 get_arg_pointer_save_area (f)
7291 struct function *f;
7293 rtx ret = f->x_arg_pointer_save_area;
7295 if (! ret)
7297 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7298 f->x_arg_pointer_save_area = ret;
7301 if (f == cfun && ! f->arg_pointer_save_area_init)
7303 rtx seq;
7305 /* Save the arg pointer at the beginning of the function. The
7306 generated stack slot may not be a valid memory address, so we
7307 have to check it and fix it if necessary. */
7308 start_sequence ();
7309 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7310 seq = get_insns ();
7311 end_sequence ();
7313 push_topmost_sequence ();
7314 emit_insn_after (seq, get_insns ());
7315 pop_topmost_sequence ();
7318 return ret;
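
/* A sketch of the sequence emitted above on a hypothetical 32-bit target
   (the register names and slot offset are illustrative, not from any
   real port):

	(set (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int -4)))
	     (reg:SI virtual-incoming-args))

   i.e. the incoming argument pointer is spilled into the new stack slot
   at function entry so it can be reloaded later, for example by a
   nonlocal goto or a builtin setjmp receiver.  */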
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (insns, vecp)
     rtx insns;
     varray_type *vecp;
{
  int i, len;
  rtx tmp;

  tmp = insns;
  len = 0;
  while (tmp != NULL_RTX)
    {
      len++;
      tmp = NEXT_INSN (tmp);
    }

  i = VARRAY_SIZE (*vecp);
  VARRAY_GROW (*vecp, i + len);
  tmp = insns;
  while (tmp != NULL_RTX)
    {
      VARRAY_INT (*vecp, i) = INSN_UID (tmp);
      i++;
      tmp = NEXT_INSN (tmp);
    }
}
/* Set the specified locator on each insn in the chain.  */

static void
set_insn_locators (insn, loc)
     rtx insn;
     int loc;
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
	INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (insn, vec)
     rtx insn;
     varray_type vec;
{
  int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
	    count++;
      return count;
    }
  else
    {
      for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	if (INSN_UID (insn) == VARRAY_INT (vec, j))
	  return 1;
    }
  return 0;
}
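
/* For example (hypothetical), after delayed-branch scheduling a port with
   delay slots may package an epilogue jump and its delay-slot insn as

	(insn (sequence [(jump_insn ... (return)) (insn ...)]))

   in which case contains () counts how many insns inside the SEQUENCE
   have UIDs recorded in VEC instead of testing only the outer insn.  */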
int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (contains (insn, prologue))
    return 1;
  if (contains (insn, epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (insn)
     rtx insn;
{
  if (sibcall_epilogue)
    return contains (insn, sibcall_epilogue);
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (bb, line_note)
     basic_block bb;
     rtx line_note;
{
  emit_jump_insn_after (gen_return (), bb->end);
  if (line_note)
    emit_line_note_after (NOTE_SOURCE_FILE (line_note),
			  NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
}
#endif /* HAVE_return */
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)

/* These functions convert the epilogue into a variant that does not modify the
   stack pointer.  This is used in cases where a function returns an object
   whose size is not known until it is computed.  The called function leaves the
   object on the stack, leaves the stack depressed, and returns a pointer to
   the object.

   What we need to do is track all modifications and references to the stack
   pointer, deleting the modifications and changing the references to point to
   the location the stack pointer would have pointed to had the modifications
   taken place.

   These functions need to be portable so we need to make as few assumptions
   about the epilogue as we can.  However, the epilogue basically contains
   three things: instructions to reset the stack pointer, instructions to
   reload registers, possibly including the frame pointer, and an
   instruction to return to the caller.

   If we can't be sure of what a relevant epilogue insn is doing, we abort.
   We also make no attempt to validate the insns we make since if they are
   invalid, we probably can't do anything valid.  The intent is that these
   routines get "smarter" as more and more machines start to use them and
   they try operating on different epilogues.

   We use the following structure to track what the part of the epilogue that
   we've already processed has done.  We keep two copies of the SP equivalence,
   one for use during the insn we are processing and one for use in the next
   insn.  The difference is because one part of a PARALLEL may adjust SP
   and the other may use it.  */
struct epi_info
{
  rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
  HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
  rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
  HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
  rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
				   should be set to once we no longer need
				   its value.  */
};

static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
static void emit_equiv_load PARAMS ((struct epi_info *));
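
/* A hypothetical illustration of the tracking these fields support.
   Suppose the epilogue is

	(set (reg fp) (mem (plus (reg sp) (const_int 12))))
	(set (reg sp) (plus (reg sp) (const_int 16)))
	(return)

   and INCOMING_RETURN_ADDR_RTX is (mem (reg sp)).  The frame-pointer
   restore is emitted unchanged.  The SP adjustment is not emitted;
   instead SP_EQUIV_REG stays (reg sp) and SP_OFFSET becomes 16, meaning
   "the SP that later insns expect is 16 above the real SP".  The (return)
   is then rewritten as an indirect jump through
   (mem (plus (reg sp) (const_int 16))), so the function returns with the
   stack still depressed.  */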
/* Modify INSNS, a list of one or more insns that is part of the epilogue,
   so that it makes no modifications to the stack pointer.  Return the new
   list of insns.  */

static rtx
keep_stack_depressed (insns)
     rtx insns;
{
  int j;
  struct epi_info info;
  rtx insn, next;

  /* If the epilogue is just a single instruction, it must be OK as is.  */
  if (NEXT_INSN (insns) == NULL_RTX)
    return insns;
  /* Otherwise, start a sequence, initialize the information we have, and
     process all the insns we were given.  */
  start_sequence ();

  info.sp_equiv_reg = stack_pointer_rtx;
  info.sp_offset = 0;
  info.equiv_reg_src = 0;

  insn = insns;
  next = NULL_RTX;
  while (insn != NULL_RTX)
    {
      next = NEXT_INSN (insn);

      if (!INSN_P (insn))
	{
	  add_insn (insn);
	  insn = next;
	  continue;
	}

      /* If this insn references the register that SP is equivalent to and
	 we have a pending load to that register, we must force out the load
	 first and then indicate we no longer know what SP's equivalent is.  */
      if (info.equiv_reg_src != 0
	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
	{
	  emit_equiv_load (&info);
	  info.sp_equiv_reg = 0;
	}

      info.new_sp_equiv_reg = info.sp_equiv_reg;
      info.new_sp_offset = info.sp_offset;
      /* If this is a (RETURN) and the return address is on the stack,
	 update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	{
	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
	  rtx base = 0;
	  HOST_WIDE_INT offset = 0;
	  rtx jump_insn, jump_set;

	  /* If the return address is in a register, we can emit the insn
	     unchanged.  Otherwise, it must be a MEM and we see what the
	     base register and offset are.  In any case, we have to emit any
	     pending load to the equivalent reg of SP, if any.  */
	  if (GET_CODE (retaddr) == REG)
	    {
	      emit_equiv_load (&info);
	      add_insn (insn);
	      insn = next;
	      continue;
	    }
	  else if (GET_CODE (retaddr) == MEM
		   && GET_CODE (XEXP (retaddr, 0)) == REG)
	    base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
	  else if (GET_CODE (retaddr) == MEM
		   && GET_CODE (XEXP (retaddr, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
		   && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
	    {
	      base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
	      offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
	    }
	  else
	    abort ();

	  /* If the base of the location containing the return pointer
	     is SP, we must update it with the replacement address.  Otherwise,
	     just build the necessary MEM.  */
	  retaddr = plus_constant (base, offset);
	  if (base == stack_pointer_rtx)
	    retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
					    plus_constant (info.sp_equiv_reg,
							   info.sp_offset));

	  retaddr = gen_rtx_MEM (Pmode, retaddr);

	  /* If there is a pending load to the equivalent register for SP
	     and we reference that register, we must load our address into
	     a scratch register and then do that load.  */
	  if (info.equiv_reg_src
	      && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
	    {
	      unsigned int regno;
	      rtx reg;

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (HARD_REGNO_MODE_OK (regno, Pmode)
		    && !fixed_regs[regno]
		    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
		    && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
					 regno)
		    && !refers_to_regno_p (regno,
					   regno + HARD_REGNO_NREGS (regno,
								     Pmode),
					   info.equiv_reg_src, NULL))
		  break;

	      if (regno == FIRST_PSEUDO_REGISTER)
		abort ();

	      reg = gen_rtx_REG (Pmode, regno);
	      emit_move_insn (reg, retaddr);
	      retaddr = reg;
	    }

	  emit_equiv_load (&info);
	  jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

	  /* Show that the SET in the above insn is a RETURN.  */
	  jump_set = single_set (jump_insn);
	  if (jump_set == 0)
	    abort ();
	  else
	    SET_IS_RETURN_P (jump_set) = 1;
	}
      /* If SP is not mentioned in the pattern and its equivalent register, if
	 any, is not modified, just emit it.  Otherwise, if neither is set,
	 replace the reference to SP and emit the insn.  If none of those are
	 true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	add_insn (insn);
      else if (! reg_set_p (stack_pointer_rtx, insn)
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	{
	  if (! validate_replace_rtx (stack_pointer_rtx,
				      plus_constant (info.sp_equiv_reg,
						     info.sp_offset),
				      insn))
	    abort ();

	  add_insn (insn);
	}
      else if (GET_CODE (PATTERN (insn)) == SET)
	handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	{
	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
	}
      else
	add_insn (insn);

      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;

      insn = next;
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (set, p)
     rtx set;
     struct epi_info *p;
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
	abort ();

      if (GET_CODE (SET_SRC (set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}

      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
	abort ();

      return;
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
	  || p->equiv_reg_src != 0)
	abort ();
      else
	p->equiv_reg_src
	  = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				  plus_constant (p->sp_equiv_reg,
						 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn with its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}
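
/* Hypothetical examples of the three cases handled above (the register
   names are illustrative only):

	(set (reg sp) (plus (reg sp) (const_int 8)))
	  -- sets SP: nothing is emitted; NEW_SP_OFFSET grows by 8.
	(set (reg fp) (mem (reg sp)))      [with SP_EQUIV_REG == (reg fp)]
	  -- sets SP's equivalent register: the SP-rewritten source is
	     saved in EQUIV_REG_SRC and only emitted by emit_equiv_load,
	     once later SP references no longer need the old value.
	(set (reg r3) (mem (plus (reg sp) (const_int 4))))
	  -- anything else: SP is replaced by SP_EQUIV_REG + SP_OFFSET and
	     the SET is emitted.  */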
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (p)
     struct epi_info *p;
{
  if (p->equiv_reg_src != 0)
    emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);

  p->equiv_reg_src = 0;
}
#endif
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* We can't deal with multiple successors of the entry block
	 at the moment.  The function should always have at least one
	 entry point.  */
      if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
	abort ();

      insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;
#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
	 then by definition we don't need a full epilogue.  Examine
	 the block that falls through to EXIT.  If it does not
	 contain any code, examine its predecessors and try to
	 emit (conditional) return instructions.  */

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = last->end;
      while (label && GET_CODE (label) != CODE_LABEL)
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (last->head == label && GET_CODE (label) == CODE_LABEL)
	{
	  rtx epilogue_line_note = NULL_RTX;

	  /* Locate the line number associated with the closing brace,
	     if we can find one.  */
	  for (seq = get_last_insn ();
	       seq && ! active_insn_p (seq);
	       seq = PREV_INSN (seq))
	    if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
	      {
		epilogue_line_note = seq;
		break;
	      }

	  for (e = last->pred; e; e = e_next)
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      e_next = e->pred_next;
	      if (bb == ENTRY_BLOCK_PTR)
		continue;

	      jump = bb->end;
	      if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
		continue;

	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb, epilogue_line_note);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  if (! redirect_jump (jump, 0, 0))
		    continue;

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (bb->succ->succ_next == NULL)
		    continue;
		}
	      else
		continue;

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (last->end);
	  emit_return_into_block (last, epilogue_line_note);
	  epilogue_end = last->end;
	  last->succ->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
	 due to RETURN instructions, but those don't need epilogues.
	 There really shouldn't be a mixture -- either all should have
	 been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;

      start_sequence ();
      epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
	 it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
	seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
#endif
epilogue_done:

  if (inserted)
    commit_edge_insertions ();
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = bb->end;
      rtx i;
      rtx newinsn;

      if (GET_CODE (insn) != CALL_INSN
	  || ! SIBLING_CALL_P (insn))
	continue;

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  Do this before we
	 actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);
    }
#endif
#ifdef HAVE_prologue
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note after the prologue.  Which means (1) that if
	 there are line number notes before where we inserted the
	 prologue we should move them, and (2) we should generate a
	 note before the end of the first basic block, if there isn't
	 one already there.

	 ??? This behavior is completely broken when dealing with
	 multiple entry functions.  We simply always place the note in
	 the first basic block and let alternate entry points be
	 missed.  */

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  */
	      if (prev == NULL)
		break;
	      reorder_insns (insn, insn, prologue_end);
	    }
	}

      /* Find the last line number note in the first block.  */
      for (insn = ENTRY_BLOCK_PTR->next_bb->end;
	   insn != prologue_end && insn;
	   insn = PREV_INSN (insn))
	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	  break;

      /* If we didn't find one, make a copy of the first line number
	 we run across.  */
      if (! insn)
	{
	  for (insn = next_active_insn (prologue_end);
	       insn;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	      {
		emit_line_note_after (NOTE_SOURCE_FILE (insn),
				      NOTE_LINE_NUMBER (insn),
				      prologue_end);
		break;
	      }
	}
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so careful about the
	 existence of such a note.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif
}
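
/* After threading, the insn stream has roughly this shape (a sketch only;
   the exact notes and insns vary by target and options):

	prologue insns
	NOTE_INSN_PROLOGUE_END
	... function body ...
	NOTE_INSN_EPILOGUE_BEG
	epilogue insns
	(return)

   Scheduling and reorg may later move prologue and epilogue insns past
   these notes; reposition_prologue_and_epilogue_notes below moves the
   notes back next to the insns they describe.  */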
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the prologue-end note if we haven't already, and
	     move it to just after the last prologue insn.  */
	  if (note == 0)
	    {
	      for (note = last; (note = NEXT_INSN (note));)
		if (GET_CODE (note) == NOTE
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		  break;
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (GET_CODE (last) == CODE_LABEL)
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if (contains (insn, epilogue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the epilogue-begin note if we haven't already, and
	     move it to just before the first epilogue insn.  */
	  if (note == 0)
	    {
	      for (note = insn; (note = PREV_INSN (note));)
		if (GET_CODE (note) == NOTE
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		  break;
	    }

	  if (PREV_INSN (last) != note)
	    reorder_insns (note, note, PREV_INSN (last));
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}

#include "gt-function.h"