1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
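/* Illustrative outline (hypothetical driver; only the named functions are
   defined by GCC) of the sequence described above:

     expand_function_start (fndecl, ...);   before the body is expanded
       ... expanding the body calls assign_stack_local for locals and,
           belatedly, put_var_into_stack when an address is taken ...
     expand_function_end (...);             after the body is expanded  */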
41 #include "config.h"
42 #include "system.h"
43 #include "coretypes.h"
44 #include "tm.h"
45 #include "rtl.h"
46 #include "tree.h"
47 #include "flags.h"
48 #include "except.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "optabs.h"
52 #include "libfuncs.h"
53 #include "regs.h"
54 #include "hard-reg-set.h"
55 #include "insn-config.h"
56 #include "recog.h"
57 #include "output.h"
58 #include "basic-block.h"
59 #include "toplev.h"
60 #include "hashtab.h"
61 #include "ggc.h"
62 #include "tm_p.h"
63 #include "integrate.h"
64 #include "langhooks.h"
65 #include "target.h"
66 #include "cfglayout.h"
68 #ifndef LOCAL_ALIGNMENT
69 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
70 #endif
72 #ifndef STACK_ALIGNMENT_NEEDED
73 #define STACK_ALIGNMENT_NEEDED 1
74 #endif
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
86 /* Round a value down to the largest multiple of the required alignment
87 that does not exceed it. Avoid using division in case the value is
88 negative. Assume the alignment is a power of two. */
89 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
91 /* Similar, but round to the next highest integer that meets the
92 alignment. */
93 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
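/* Illustrative examples (values hypothetical): with a power-of-two ALIGN
   these macros behave sensibly even for negative values, e.g.

     FLOOR_ROUND (13, 8)  ==   8      CEIL_ROUND (13, 8)  ==  16
     FLOOR_ROUND (-13, 8) == -16      CEIL_ROUND (-13, 8) ==  -8

   which is why masking is used here instead of division.  */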
95 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
96 during rtl generation. If they are different register numbers, this is
97 always true. It may also be true if
98 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
99 generation. See fix_lexical_addr for details. */
101 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
102 #define NEED_SEPARATE_AP
103 #endif
105 /* Nonzero if function being compiled doesn't contain any calls
106 (ignoring the prologue and epilogue). This is set prior to
107 local register allocation and is valid for the remaining
108 compiler passes. */
109 int current_function_is_leaf;
111 /* Nonzero if function being compiled doesn't contain any instructions
112 that can throw an exception. This is set prior to final. */
114 int current_function_nothrow;
116 /* Nonzero if function being compiled doesn't modify the stack pointer
117 (ignoring the prologue and epilogue). This is only valid after
118 life_analysis has run. */
119 int current_function_sp_is_unchanging;
121 /* Nonzero if the function being compiled is a leaf function which only
122 uses leaf registers. This is valid after reload (specifically after
123 sched2) and is useful only if the port defines LEAF_REGISTERS. */
124 int current_function_uses_only_leaf_regs;
126 /* Nonzero once virtual register instantiation has been done.
127 assign_stack_local uses frame_pointer_rtx when this is nonzero.
128 calls.c:emit_library_call_value_1 uses it to set up
129 post-instantiation libcalls. */
130 int virtuals_instantiated;
132 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
133 static GTY(()) int funcdef_no;
135 /* These variables hold pointers to functions to create and destroy
136 target specific, per-function data structures. */
137 struct machine_function * (*init_machine_status) (void);
139 /* The currently compiled function. */
140 struct function *cfun = 0;
142 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
143 static GTY(()) varray_type prologue;
144 static GTY(()) varray_type epilogue;
146 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
147 in this function. */
148 static GTY(()) varray_type sibcall_epilogue;
150 /* In order to evaluate some expressions, such as function calls returning
151 structures in memory, we need to temporarily allocate stack locations.
152 We record each allocated temporary in the following structure.
154 Associated with each temporary slot is a nesting level. When we pop up
155 one level, all temporaries associated with the previous level are freed.
156 Normally, all temporaries are freed after the execution of the statement
157 in which they were created. However, if we are inside a ({...}) grouping,
158 the result may be in a temporary and hence must be preserved. If the
159 result could be in a temporary, we preserve it if we can determine which
160 one it is in. If we cannot determine which temporary may contain the
161 result, all temporaries are preserved. A temporary is preserved by
162 pretending it was allocated at the previous nesting level.
164 Automatic variables are also assigned temporary slots, at the nesting
165 level where they are defined. They are marked as "kept" so that
166 free_temp_slots will not free them. */
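/* Illustrative note (the example is hypothetical): for a statement expression
   such as

     x = ({ struct S tmp = f (); g (&tmp); tmp; });

   the temporary holding the ({...}) result must survive the per-statement
   call to free_temp_slots, so preserve_temp_slots below re-files it at the
   enclosing nesting level instead of letting it be freed.  */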
168 struct temp_slot GTY(())
170 /* Points to next temporary slot. */
171 struct temp_slot *next;
172 /* Points to previous temporary slot. */
173 struct temp_slot *prev;
175 /* The rtx used to reference the slot. */
176 rtx slot;
177 /* The rtx used to represent the address if not the address of the
178 slot above. May be an EXPR_LIST if multiple addresses exist. */
179 rtx address;
180 /* The alignment (in bits) of the slot. */
181 unsigned int align;
182 /* The size, in units, of the slot. */
183 HOST_WIDE_INT size;
184 /* The type of the object in the slot, or zero if it doesn't correspond
185 to a type. We use this to determine whether a slot can be reused.
186 It can be reused if objects of the type of the new slot will always
187 conflict with objects of the type of the old slot. */
188 tree type;
189 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
190 tree rtl_expr;
191 /* Nonzero if this temporary is currently in use. */
192 char in_use;
193 /* Nonzero if this temporary has its address taken. */
194 char addr_taken;
195 /* Nesting level at which this slot is being used. */
196 int level;
197 /* Nonzero if this should survive a call to free_temp_slots. */
198 int keep;
199 /* The offset of the slot from the frame_pointer, including extra space
200 for alignment. This info is for combine_temp_slots. */
201 HOST_WIDE_INT base_offset;
202 /* The size of the slot, including extra space for alignment. This
203 info is for combine_temp_slots. */
204 HOST_WIDE_INT full_size;
207 /* This structure is used to record MEMs or pseudos used to replace VAR, any
208 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
209 maintain this list in case two operands of an insn were required to match;
210 in that case we must ensure we use the same replacement. */
212 struct fixup_replacement GTY(())
214 rtx old;
215 rtx new;
216 struct fixup_replacement *next;
219 struct insns_for_mem_entry
221 /* A MEM. */
222 rtx key;
223 /* These are the INSNs which reference the MEM. */
224 rtx insns;
227 /* Forward declarations. */
229 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
230 struct function *);
231 static struct temp_slot *find_temp_slot_from_address (rtx);
232 static void put_reg_into_stack (struct function *, rtx, tree, enum machine_mode,
233 unsigned int, bool, bool, bool, htab_t);
234 static void schedule_fixup_var_refs (struct function *, rtx, tree, enum machine_mode,
235 htab_t);
236 static void fixup_var_refs (rtx, enum machine_mode, int, rtx, htab_t);
237 static struct fixup_replacement
238 *find_fixup_replacement (struct fixup_replacement **, rtx);
239 static void fixup_var_refs_insns (rtx, rtx, enum machine_mode, int, int, rtx);
240 static void fixup_var_refs_insns_with_hash (htab_t, rtx, enum machine_mode, int, rtx);
241 static void fixup_var_refs_insn (rtx, rtx, enum machine_mode, int, int, rtx);
242 static void fixup_var_refs_1 (rtx, enum machine_mode, rtx *, rtx,
243 struct fixup_replacement **, rtx);
244 static rtx fixup_memory_subreg (rtx, rtx, enum machine_mode, int);
245 static rtx walk_fixup_memory_subreg (rtx, rtx, rtx, enum machine_mode, int);
246 static rtx fixup_stack_1 (rtx, rtx);
247 static void optimize_bit_field (rtx, rtx, rtx *);
248 static void instantiate_decls (tree, int);
249 static void instantiate_decls_1 (tree, int);
250 static void instantiate_decl (rtx, HOST_WIDE_INT, int);
251 static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
252 static int instantiate_virtual_regs_1 (rtx *, rtx, int);
253 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
254 static void pad_below (struct args_size *, enum machine_mode, tree);
255 static tree *identify_blocks_1 (rtx, tree *, tree *, tree *);
256 static void reorder_blocks_1 (rtx, tree, varray_type *);
257 static void reorder_fix_fragments (tree);
258 static int all_blocks (tree, tree *);
259 static tree *get_block_vector (tree, int *);
260 extern tree debug_find_var_in_block_tree (tree, tree);
261 /* We always define `record_insns' even if it's not used so that we
262 can always export `prologue_epilogue_contains'. */
263 static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
264 static int contains (rtx, varray_type);
265 #ifdef HAVE_return
266 static void emit_return_into_block (basic_block, rtx);
267 #endif
268 static void put_addressof_into_stack (rtx, htab_t);
269 static bool purge_addressof_1 (rtx *, rtx, int, int, int, htab_t);
270 static void purge_single_hard_subreg_set (rtx);
271 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
272 static rtx keep_stack_depressed (rtx);
273 #endif
274 static int is_addressof (rtx *, void *);
275 static hashval_t insns_for_mem_hash (const void *);
276 static int insns_for_mem_comp (const void *, const void *);
277 static int insns_for_mem_walk (rtx *, void *);
278 static void compute_insns_for_mem (rtx, rtx, htab_t);
279 static void prepare_function_start (tree);
280 static void do_clobber_return_reg (rtx, void *);
281 static void do_use_return_reg (rtx, void *);
282 static void instantiate_virtual_regs_lossage (rtx);
283 static tree split_complex_args (tree);
284 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
286 /* Pointer to chain of `struct function' for containing functions. */
287 struct function *outer_function_chain;
289 /* List of insns that were postponed by purge_addressof_1. */
290 static rtx postponed_insns;
292 /* Given a function decl for a containing function,
293 return the `struct function' for it. */
295 struct function *
296 find_function_data (tree decl)
298 struct function *p;
300 for (p = outer_function_chain; p; p = p->outer)
301 if (p->decl == decl)
302 return p;
304 abort ();
307 /* Save the current context for compilation of a nested function.
308 This is called from language-specific code. The caller should use
309 the enter_nested langhook to save any language-specific state,
310 since this function knows only about language-independent
311 variables. */
313 void
314 push_function_context_to (tree context)
316 struct function *p;
318 if (context)
320 if (context == current_function_decl)
321 cfun->contains_functions = 1;
322 else
324 struct function *containing = find_function_data (context);
325 containing->contains_functions = 1;
329 if (cfun == 0)
330 init_dummy_function_start ();
331 p = cfun;
333 p->outer = outer_function_chain;
334 outer_function_chain = p;
335 p->fixup_var_refs_queue = 0;
337 lang_hooks.function.enter_nested (p);
339 cfun = 0;
342 void
343 push_function_context (void)
345 push_function_context_to (current_function_decl);
348 /* Restore the last saved context, at the end of a nested function.
349 This function is called from language-specific code. */
351 void
352 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
354 struct function *p = outer_function_chain;
355 struct var_refs_queue *queue;
357 cfun = p;
358 outer_function_chain = p->outer;
360 current_function_decl = p->decl;
361 reg_renumber = 0;
363 restore_emit_status (p);
365 lang_hooks.function.leave_nested (p);
367 /* Finish doing put_var_into_stack for any of our variables which became
368 addressable during the nested function. If only one entry has to be
369 fixed up, just do that one. Otherwise, first make a list of MEMs that
370 are not to be unshared. */
371 if (p->fixup_var_refs_queue == 0)
373 else if (p->fixup_var_refs_queue->next == 0)
374 fixup_var_refs (p->fixup_var_refs_queue->modified,
375 p->fixup_var_refs_queue->promoted_mode,
376 p->fixup_var_refs_queue->unsignedp,
377 p->fixup_var_refs_queue->modified, 0);
378 else
380 rtx list = 0;
382 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
383 list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);
385 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
386 fixup_var_refs (queue->modified, queue->promoted_mode,
387 queue->unsignedp, list, 0);
391 p->fixup_var_refs_queue = 0;
393 /* Reset variables that have known state during rtx generation. */
394 rtx_equal_function_value_matters = 1;
395 virtuals_instantiated = 0;
396 generating_concat_p = 1;
399 void
400 pop_function_context (void)
402 pop_function_context_from (current_function_decl);
405 /* Clear out all parts of the state in F that can safely be discarded
406 after the function has been parsed, but not compiled, to let
407 garbage collection reclaim the memory. */
409 void
410 free_after_parsing (struct function *f)
412 /* f->expr->forced_labels is used by code generation. */
413 /* f->emit->regno_reg_rtx is used by code generation. */
414 /* f->varasm is used by code generation. */
415 /* f->eh->eh_return_stub_label is used by code generation. */
417 lang_hooks.function.final (f);
418 f->stmt = NULL;
421 /* Clear out all parts of the state in F that can safely be discarded
422 after the function has been compiled, to let garbage collection
423 reclaim the memory. */
425 void
426 free_after_compilation (struct function *f)
428 f->eh = NULL;
429 f->expr = NULL;
430 f->emit = NULL;
431 f->varasm = NULL;
432 f->machine = NULL;
434 f->x_avail_temp_slots = NULL;
435 f->x_used_temp_slots = NULL;
436 f->arg_offset_rtx = NULL;
437 f->return_rtx = NULL;
438 f->internal_arg_pointer = NULL;
439 f->x_nonlocal_goto_handler_labels = NULL;
440 f->x_cleanup_label = NULL;
441 f->x_return_label = NULL;
442 f->x_naked_return_label = NULL;
443 f->computed_goto_common_label = NULL;
444 f->computed_goto_common_reg = NULL;
445 f->x_save_expr_regs = NULL;
446 f->x_stack_slot_list = NULL;
447 f->x_rtl_expr_chain = NULL;
448 f->x_tail_recursion_reentry = NULL;
449 f->x_arg_pointer_save_area = NULL;
450 f->x_parm_birth_insn = NULL;
451 f->x_last_parm_insn = NULL;
452 f->x_parm_reg_stack_loc = NULL;
453 f->fixup_var_refs_queue = NULL;
454 f->original_arg_vector = NULL;
455 f->original_decl_initial = NULL;
456 f->inl_last_parm_insn = NULL;
457 f->epilogue_delay_list = NULL;
460 /* Allocate fixed slots in the stack frame of the current function. */
462 /* Return size needed for stack frame based on slots so far allocated in
463 function F.
464 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
465 the caller may have to do that. */
467 HOST_WIDE_INT
468 get_func_frame_size (struct function *f)
470 #ifdef FRAME_GROWS_DOWNWARD
471 return -f->x_frame_offset;
472 #else
473 return f->x_frame_offset;
474 #endif
477 /* Return size needed for stack frame based on slots so far allocated.
478 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
479 the caller may have to do that. */
480 HOST_WIDE_INT
481 get_frame_size (void)
483 return get_func_frame_size (cfun);
486 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
487 with machine mode MODE.
489 ALIGN controls the amount of alignment for the address of the slot:
490 0 means according to MODE,
491 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
492 -2 means use BITS_PER_UNIT,
493 positive specifies alignment boundary in bits.
495 We do not round to stack_boundary here.
497 FUNCTION specifies the function to allocate in. */
499 static rtx
500 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
501 struct function *function)
503 rtx x, addr;
504 int bigend_correction = 0;
505 int alignment;
506 int frame_off, frame_alignment, frame_phase;
508 if (align == 0)
510 tree type;
512 if (mode == BLKmode)
513 alignment = BIGGEST_ALIGNMENT;
514 else
515 alignment = GET_MODE_ALIGNMENT (mode);
517 /* Allow the target to (possibly) increase the alignment of this
518 stack slot. */
519 type = lang_hooks.types.type_for_mode (mode, 0);
520 if (type)
521 alignment = LOCAL_ALIGNMENT (type, alignment);
523 alignment /= BITS_PER_UNIT;
525 else if (align == -1)
527 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
528 size = CEIL_ROUND (size, alignment);
530 else if (align == -2)
531 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
532 else
533 alignment = align / BITS_PER_UNIT;
535 #ifdef FRAME_GROWS_DOWNWARD
536 function->x_frame_offset -= size;
537 #endif
539 /* Ignore any alignment request beyond what the preferred stack boundary can provide. */
540 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
541 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
543 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
544 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
546 /* Calculate how many bytes the start of local variables is off from
547 stack alignment. */
548 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
549 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
550 frame_phase = frame_off ? frame_alignment - frame_off : 0;
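/* Worked example (numbers hypothetical): if STARTING_FRAME_OFFSET were 4
   and the preferred boundary 16 bytes, frame_off would be 4 and
   frame_phase 12, so the rounding below keeps slot offsets congruent to
   that phase rather than to zero.  */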
552 /* Round the frame offset to the specified alignment. The default is
553 to always honor requests to align the stack but a port may choose to
554 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
555 if (STACK_ALIGNMENT_NEEDED
556 || mode != BLKmode
557 || size != 0)
559 /* We must be careful here, since FRAME_OFFSET might be negative and
560 division with a negative dividend isn't as well defined as we might
561 like. So we instead assume that ALIGNMENT is a power of two and
562 use logical operations which are unambiguous. */
563 #ifdef FRAME_GROWS_DOWNWARD
564 function->x_frame_offset
565 = (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
566 + frame_phase);
567 #else
568 function->x_frame_offset
569 = (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
570 + frame_phase);
571 #endif
574 /* On a big-endian machine, if we are allocating more space than we will use,
575 use the least significant bytes of those that are allocated. */
576 if (BYTES_BIG_ENDIAN && mode != BLKmode)
577 bigend_correction = size - GET_MODE_SIZE (mode);
579 /* If we have already instantiated virtual registers, return the actual
580 address relative to the frame pointer. */
581 if (function == cfun && virtuals_instantiated)
582 addr = plus_constant (frame_pointer_rtx,
583 trunc_int_for_mode
584 (frame_offset + bigend_correction
585 + STARTING_FRAME_OFFSET, Pmode));
586 else
587 addr = plus_constant (virtual_stack_vars_rtx,
588 trunc_int_for_mode
589 (function->x_frame_offset + bigend_correction,
590 Pmode));
592 #ifndef FRAME_GROWS_DOWNWARD
593 function->x_frame_offset += size;
594 #endif
596 x = gen_rtx_MEM (mode, addr);
598 function->x_stack_slot_list
599 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
601 return x;
604 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
605 current function. */
608 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
610 return assign_stack_local_1 (mode, size, align, cfun);
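/* Illustrative use of the wrapper above (the caller is hypothetical);
   ALIGN == 0 requests the natural alignment of MODE:

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   The returned MEM is addressed through virtual_stack_vars_rtx until the
   virtual registers are instantiated.  */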
614 /* Removes temporary slot TEMP from LIST. */
616 static void
617 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
619 if (temp->next)
620 temp->next->prev = temp->prev;
621 if (temp->prev)
622 temp->prev->next = temp->next;
623 else
624 *list = temp->next;
626 temp->prev = temp->next = NULL;
629 /* Inserts temporary slot TEMP to LIST. */
631 static void
632 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
634 temp->next = *list;
635 if (*list)
636 (*list)->prev = temp;
637 temp->prev = NULL;
638 *list = temp;
641 /* Returns the list of used temp slots at LEVEL. */
643 static struct temp_slot **
644 temp_slots_at_level (int level)
646 level++;
648 if (!used_temp_slots)
649 VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
651 while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
652 VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
654 return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
657 /* Returns the maximal temporary slot level. */
659 static int
660 max_slot_level (void)
662 if (!used_temp_slots)
663 return -1;
665 return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
668 /* Moves temporary slot TEMP to LEVEL. */
670 static void
671 move_slot_to_level (struct temp_slot *temp, int level)
673 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
674 insert_slot_to_list (temp, temp_slots_at_level (level));
675 temp->level = level;
678 /* Make temporary slot TEMP available. */
680 static void
681 make_slot_available (struct temp_slot *temp)
683 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
684 insert_slot_to_list (temp, &avail_temp_slots);
685 temp->in_use = 0;
686 temp->level = -1;
689 /* Allocate a temporary stack slot and record it for possible later
690 reuse.
692 MODE is the machine mode to be given to the returned rtx.
694 SIZE is the size in units of the space required. We do no rounding here
695 since assign_stack_local will do any required rounding.
697 KEEP is 1 if this slot is to be retained after a call to
698 free_temp_slots. Automatic variables for a block are allocated
699 with this flag. KEEP is 2 if we allocate a longer term temporary,
700 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
701 if we are to allocate something at an inner level to be treated as
702 a variable in the block (e.g., a SAVE_EXPR).
704 TYPE is the type that will be used for the stack slot. */
707 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
708 tree type)
710 unsigned int align;
711 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
712 rtx slot;
714 /* If SIZE is -1 it means that somebody tried to allocate a temporary
715 of a variable size. */
716 if (size == -1)
717 abort ();
719 if (mode == BLKmode)
720 align = BIGGEST_ALIGNMENT;
721 else
722 align = GET_MODE_ALIGNMENT (mode);
724 if (! type)
725 type = lang_hooks.types.type_for_mode (mode, 0);
727 if (type)
728 align = LOCAL_ALIGNMENT (type, align);
730 /* Try to find an available, already-allocated temporary of the proper
731 mode which meets the size and alignment requirements. Choose the
732 smallest one with the closest alignment. */
733 for (p = avail_temp_slots; p; p = p->next)
735 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
736 && objects_must_conflict_p (p->type, type)
737 && (best_p == 0 || best_p->size > p->size
738 || (best_p->size == p->size && best_p->align > p->align)))
740 if (p->align == align && p->size == size)
742 selected = p;
743 cut_slot_from_list (selected, &avail_temp_slots);
744 best_p = 0;
745 break;
747 best_p = p;
751 /* Make our best, if any, the one to use. */
752 if (best_p)
754 selected = best_p;
755 cut_slot_from_list (selected, &avail_temp_slots);
757 /* If there are enough aligned bytes left over, make them into a new
758 temp_slot so that the extra bytes don't get wasted. Do this only
759 for BLKmode slots, so that we can be sure of the alignment. */
760 if (GET_MODE (best_p->slot) == BLKmode)
762 int alignment = best_p->align / BITS_PER_UNIT;
763 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
765 if (best_p->size - rounded_size >= alignment)
767 p = ggc_alloc (sizeof (struct temp_slot));
768 p->in_use = p->addr_taken = 0;
769 p->size = best_p->size - rounded_size;
770 p->base_offset = best_p->base_offset + rounded_size;
771 p->full_size = best_p->full_size - rounded_size;
772 p->slot = gen_rtx_MEM (BLKmode,
773 plus_constant (XEXP (best_p->slot, 0),
774 rounded_size));
775 p->align = best_p->align;
776 p->address = 0;
777 p->rtl_expr = 0;
778 p->type = best_p->type;
779 insert_slot_to_list (p, &avail_temp_slots);
781 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
782 stack_slot_list);
784 best_p->size = rounded_size;
785 best_p->full_size = rounded_size;
790 /* If we still didn't find one, make a new temporary. */
791 if (selected == 0)
793 HOST_WIDE_INT frame_offset_old = frame_offset;
795 p = ggc_alloc (sizeof (struct temp_slot));
797 /* We are passing an explicit alignment request to assign_stack_local.
798 One side effect of that is assign_stack_local will not round SIZE
799 to ensure the frame offset remains suitably aligned.
801 So for requests which depended on the rounding of SIZE, we go ahead
802 and round it now. We also make sure ALIGNMENT is at least
803 BIGGEST_ALIGNMENT. */
804 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
805 abort ();
806 p->slot = assign_stack_local (mode,
807 (mode == BLKmode
808 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
809 : size),
810 align);
812 p->align = align;
814 /* The following slot size computation is necessary because we don't
815 know the actual size of the temporary slot until assign_stack_local
816 has performed all the frame alignment and size rounding for the
817 requested temporary. Note that extra space added for alignment
818 can be either above or below this stack slot depending on which
819 way the frame grows. We include the extra space if and only if it
820 is above this slot. */
821 #ifdef FRAME_GROWS_DOWNWARD
822 p->size = frame_offset_old - frame_offset;
823 #else
824 p->size = size;
825 #endif
827 /* Now define the fields used by combine_temp_slots. */
828 #ifdef FRAME_GROWS_DOWNWARD
829 p->base_offset = frame_offset;
830 p->full_size = frame_offset_old - frame_offset;
831 #else
832 p->base_offset = frame_offset_old;
833 p->full_size = frame_offset - frame_offset_old;
834 #endif
835 p->address = 0;
837 selected = p;
840 p = selected;
841 p->in_use = 1;
842 p->addr_taken = 0;
843 p->rtl_expr = seq_rtl_expr;
844 p->type = type;
846 if (keep == 2)
848 p->level = target_temp_slot_level;
849 p->keep = 1;
851 else if (keep == 3)
853 p->level = var_temp_slot_level;
854 p->keep = 0;
856 else
858 p->level = temp_slot_level;
859 p->keep = keep;
862 pp = temp_slots_at_level (p->level);
863 insert_slot_to_list (p, pp);
865 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
866 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
867 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
869 /* If we know the alias set for the memory that will be used, use
870 it. If there's no TYPE, then we don't know anything about the
871 alias set for the memory. */
872 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
873 set_mem_align (slot, align);
875 /* If a type is specified, set the relevant flags. */
876 if (type != 0)
878 RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
879 && TYPE_READONLY (type));
880 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
881 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
884 return slot;
887 /* Allocate a temporary stack slot and record it for possible later
888 reuse. First three arguments are same as in preceding function. */
891 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
893 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
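/* Illustrative uses (callers hypothetical) of the KEEP argument described
   above:

     rtx t0 = assign_stack_temp (SImode, 4, 0);  freed by free_temp_slots
     rtx t1 = assign_stack_temp (SImode, 4, 1);  survives free_temp_slots
     rtx t2 = assign_stack_temp (SImode, 4, 2);  lives until the enclosing
                                                 CLEANUP_POINT_EXPR ends  */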
896 /* Assign a temporary.
897 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
898 and so that should be used in error messages. In either case, we
899 allocate space of the given type.
900 KEEP is as for assign_stack_temp.
901 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
902 it is 0 if a register is OK.
903 DONT_PROMOTE is 1 if we should not promote values in register
904 to wider modes. */
907 assign_temp (tree type_or_decl, int keep, int memory_required,
908 int dont_promote ATTRIBUTE_UNUSED)
910 tree type, decl;
911 enum machine_mode mode;
912 #ifdef PROMOTE_MODE
913 int unsignedp;
914 #endif
916 if (DECL_P (type_or_decl))
917 decl = type_or_decl, type = TREE_TYPE (decl);
918 else
919 decl = NULL, type = type_or_decl;
921 mode = TYPE_MODE (type);
922 #ifdef PROMOTE_MODE
923 unsignedp = TYPE_UNSIGNED (type);
924 #endif
926 if (mode == BLKmode || memory_required)
928 HOST_WIDE_INT size = int_size_in_bytes (type);
929 rtx tmp;
931 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
932 problems with allocating the stack space. */
933 if (size == 0)
934 size = 1;
936 /* Unfortunately, we don't yet know how to allocate variable-sized
937 temporaries. However, sometimes we have a fixed upper limit on
938 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
939 instead. This is the case for Chill variable-sized strings. */
940 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
941 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
942 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
943 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
945 /* The size of the temporary may be too large to fit into an integer. */
946 /* ??? Not sure this should happen except for user silliness, so limit
947 this to things that aren't compiler-generated temporaries. The
948 rest of the time we'll abort in assign_stack_temp_for_type. */
949 if (decl && size == -1
950 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
952 error ("%Jsize of variable '%D' is too large", decl, decl);
953 size = 1;
956 tmp = assign_stack_temp_for_type (mode, size, keep, type);
957 return tmp;
960 #ifdef PROMOTE_MODE
961 if (! dont_promote)
962 mode = promote_mode (type, mode, &unsignedp, 0);
963 #endif
965 return gen_reg_rtx (mode);
968 /* Combine temporary stack slots which are adjacent on the stack.
970 This allows for better use of already allocated stack space. This is only
971 done for BLKmode slots because we can be sure that we won't have alignment
972 problems in this case. */
974 void
975 combine_temp_slots (void)
977 struct temp_slot *p, *q, *next, *next_q;
978 int num_slots;
980 /* We can't combine slots, because the information about which slot
981 is in which alias set will be lost. */
982 if (flag_strict_aliasing)
983 return;
985 /* If there are a lot of temp slots, don't do anything unless
986 high levels of optimization are enabled. */
987 if (! flag_expensive_optimizations)
988 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
989 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
990 return;
992 for (p = avail_temp_slots; p; p = next)
994 int delete_p = 0;
996 next = p->next;
998 if (GET_MODE (p->slot) != BLKmode)
999 continue;
1001 for (q = p->next; q; q = next_q)
1003 int delete_q = 0;
1005 next_q = q->next;
1007 if (GET_MODE (q->slot) != BLKmode)
1008 continue;
1010 if (p->base_offset + p->full_size == q->base_offset)
1012 /* Q comes after P; combine Q into P. */
1013 p->size += q->size;
1014 p->full_size += q->full_size;
1015 delete_q = 1;
1017 else if (q->base_offset + q->full_size == p->base_offset)
1019 /* P comes after Q; combine P into Q. */
1020 q->size += p->size;
1021 q->full_size += p->full_size;
1022 delete_p = 1;
1023 break;
1025 if (delete_q)
1026 cut_slot_from_list (q, &avail_temp_slots);
1029 /* Either delete P or advance past it. */
1030 if (delete_p)
1031 cut_slot_from_list (p, &avail_temp_slots);
1035 /* Find the temp slot corresponding to the object at address X. */
1037 static struct temp_slot *
1038 find_temp_slot_from_address (rtx x)
1040 struct temp_slot *p;
1041 rtx next;
1042 int i;
1044 for (i = max_slot_level (); i >= 0; i--)
1045 for (p = *temp_slots_at_level (i); p; p = p->next)
1047 if (XEXP (p->slot, 0) == x
1048 || p->address == x
1049 || (GET_CODE (x) == PLUS
1050 && XEXP (x, 0) == virtual_stack_vars_rtx
1051 && GET_CODE (XEXP (x, 1)) == CONST_INT
1052 && INTVAL (XEXP (x, 1)) >= p->base_offset
1053 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
1054 return p;
1056 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1057 for (next = p->address; next; next = XEXP (next, 1))
1058 if (XEXP (next, 0) == x)
1059 return p;
1062 /* If we have a sum involving a register, see if it points to a temp
1063 slot. */
1064 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
1065 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
1066 return p;
1067 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
1068 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
1069 return p;
1071 return 0;
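/* Note on the lookup above: three address shapes are recognized: the
   slot's own address, any alias recorded by update_temp_slot_address
   (possibly an EXPR_LIST of them), and a (plus virtual_stack_vars_rtx
   (const_int N)) whose N falls inside [base_offset, base_offset +
   full_size) for the slot.  */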
1074 /* Indicate that NEW is an alternate way of referring to the temp slot
1075 that previously was known by OLD. */
1077 void
1078 update_temp_slot_address (rtx old, rtx new)
1080 struct temp_slot *p;
1082 if (rtx_equal_p (old, new))
1083 return;
1085 p = find_temp_slot_from_address (old);
1087 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
1088 is a register, see if one operand of the PLUS is a temporary
1089 location; if so, NEW points into it. Otherwise, if both OLD and
1090 NEW are a PLUS and there is a register in common between them,
1091 try a recursive call on those values. */
1092 if (p == 0)
1094 if (GET_CODE (old) != PLUS)
1095 return;
1097 if (REG_P (new))
1099 update_temp_slot_address (XEXP (old, 0), new);
1100 update_temp_slot_address (XEXP (old, 1), new);
1101 return;
1103 else if (GET_CODE (new) != PLUS)
1104 return;
1106 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1107 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1108 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1109 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1110 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1111 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1112 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1113 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1115 return;
1118 /* Otherwise add an alias for the temp's address. */
1119 else if (p->address == 0)
1120 p->address = new;
1121 else
1123 if (GET_CODE (p->address) != EXPR_LIST)
1124 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1126 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1130 /* If X could be a reference to a temporary slot, mark the fact that its
1131 address was taken. */
1133 void
1134 mark_temp_addr_taken (rtx x)
1136 struct temp_slot *p;
1138 if (x == 0)
1139 return;
1141 /* If X is not in memory or is at a constant address, it cannot be in
1142 a temporary slot. */
1143 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1144 return;
1146 p = find_temp_slot_from_address (XEXP (x, 0));
1147 if (p != 0)
1148 p->addr_taken = 1;
1151 /* If X could be a reference to a temporary slot, mark that slot as
1152 belonging to the level one higher than the current level. If X
1153 matched one of our slots, just mark that one. Otherwise, we can't
1154 easily predict which it is, so upgrade all of them. Kept slots
1155 need not be touched.
1157 This is called when an ({...}) construct occurs and a statement
1158 returns a value in memory. */
1160 void
1161 preserve_temp_slots (rtx x)
1163 struct temp_slot *p = 0, *next;
1165 /* If there is no result, we still might have some objects whose address
1166 were taken, so we need to make sure they stay around. */
1167 if (x == 0)
1169 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1171 next = p->next;
1173 if (p->addr_taken)
1174 move_slot_to_level (p, temp_slot_level - 1);
1177 return;
1180 /* If X is a register that is being used as a pointer, see if we have
1181 a temporary slot we know it points to. To be consistent with
1182 the code below, we really should preserve all non-kept slots
1183 if we can't find a match, but that seems to be much too costly. */
1184 if (REG_P (x) && REG_POINTER (x))
1185 p = find_temp_slot_from_address (x);
1187 /* If X is not in memory or is at a constant address, it cannot be in
1188 a temporary slot, but it can contain something whose address was
1189 taken. */
1190 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1192 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1194 next = p->next;
1196 if (p->addr_taken)
1197 move_slot_to_level (p, temp_slot_level - 1);
1200 return;
1203 /* First see if we can find a match. */
1204 if (p == 0)
1205 p = find_temp_slot_from_address (XEXP (x, 0));
1207 if (p != 0)
1209 /* Move everything at our level whose address was taken to our new
1210 level in case we used its address. */
1211 struct temp_slot *q;
1213 if (p->level == temp_slot_level)
1215 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1217 next = q->next;
1219 if (p != q && q->addr_taken)
1220 move_slot_to_level (q, temp_slot_level - 1);
1223 move_slot_to_level (p, temp_slot_level - 1);
1224 p->addr_taken = 0;
1226 return;
1229 /* Otherwise, preserve all non-kept slots at this level. */
1230 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1232 next = p->next;
1234 if (!p->keep)
1235 move_slot_to_level (p, temp_slot_level - 1);
1239 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1240 with that RTL_EXPR, promote it into a temporary slot at the present
1241 level so it will not be freed when we free slots made in the
1242 RTL_EXPR. */
1244 void
1245 preserve_rtl_expr_result (rtx x)
1247 struct temp_slot *p;
1249 /* If X is not in memory or is at a constant address, it cannot be in
1250 a temporary slot. */
1251 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1252 return;
1254 /* If we can find a match, move it to our level unless it is already at
1255 an upper level. */
1256 p = find_temp_slot_from_address (XEXP (x, 0));
1257 if (p != 0)
1259 move_slot_to_level (p, MIN (p->level, temp_slot_level));
1260 p->rtl_expr = 0;
1263 return;
1266 /* Free all temporaries used so far. This is normally called at the end
1267 of generating code for a statement. Don't free any temporaries
1268 currently in use for an RTL_EXPR that hasn't yet been emitted.
1269 We could eventually do better than this since it can be reused while
1270 generating the same RTL_EXPR, but this is complex and probably not
1271 worthwhile. */
1273 void
1274 free_temp_slots (void)
1276 struct temp_slot *p, *next;
1278 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1280 next = p->next;
1282 if (!p->keep && p->rtl_expr == 0)
1283 make_slot_available (p);
1286 combine_temp_slots ();
1289 /* Free all temporary slots used in T, an RTL_EXPR node. */
1291 void
1292 free_temps_for_rtl_expr (tree t)
1294 struct temp_slot *p, *next;
1296 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1298 next = p->next;
1300 if (p->rtl_expr == t)
1302 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1303 needs to be preserved. This can happen if a temporary in
1304 the RTL_EXPR was addressed; preserve_temp_slots will move
1305 the temporary into a higher level. */
1306 if (temp_slot_level <= p->level)
1307 make_slot_available (p);
1308 else
1309 p->rtl_expr = NULL_TREE;
1313 combine_temp_slots ();
1316 /* Push deeper into the nesting level for stack temporaries. */
1318 void
1319 push_temp_slots (void)
1321 temp_slot_level++;
1324 /* Pop a temporary nesting level. All slots in use in the current level
1325 are freed. */
1327 void
1328 pop_temp_slots (void)
1330 struct temp_slot *p, *next;
1332 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1334 next = p->next;
1336 if (p->rtl_expr == 0)
1337 make_slot_available (p);
1340 combine_temp_slots ();
1342 temp_slot_level--;
1345 /* Initialize temporary slots. */
1347 void
1348 init_temp_slots (void)
1350 /* We have not allocated any temporaries yet. */
1351 avail_temp_slots = 0;
1352 used_temp_slots = 0;
1353 temp_slot_level = 0;
1354 var_temp_slot_level = 0;
1355 target_temp_slot_level = 0;
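/* Illustrative driver (hypothetical) for the level counters reset above:

     init_temp_slots ();                         per-function reset
     push_temp_slots ();                         enter a nesting level
     t = assign_stack_temp (SImode, 4, 0);       slot tagged with that level
     free_temp_slots ();                         end of statement
     pop_temp_slots ();                          leave the level          */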
1358 /* Retroactively move an auto variable from a register to a stack
1359 slot. This is done when an address-reference to the variable is
1360 seen. If RESCAN is true, all previously emitted instructions are
1361 examined and modified to handle the fact that DECL is now
1362 addressable. */
1364 void
1365 put_var_into_stack (tree decl, int rescan)
1367 rtx orig_reg, reg;
1368 enum machine_mode promoted_mode, decl_mode;
1369 struct function *function = 0;
1370 tree context;
1371 bool can_use_addressof_p;
1372 bool volatile_p = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1373 bool used_p = (TREE_USED (decl)
1374 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1376 context = decl_function_context (decl);
1378 /* Get the current rtl used for this object and its original mode. */
1379 orig_reg = reg = (TREE_CODE (decl) == SAVE_EXPR
1380 ? SAVE_EXPR_RTL (decl)
1381 : DECL_RTL_IF_SET (decl));
1383 /* No need to do anything if decl has no rtx yet
1384 since in that case caller is setting TREE_ADDRESSABLE
1385 and a stack slot will be assigned when the rtl is made. */
1386 if (reg == 0)
1387 return;
1389 /* Get the declared mode for this object. */
1390 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1391 : DECL_MODE (decl));
1392 /* Get the mode it's actually stored in. */
1393 promoted_mode = GET_MODE (reg);
1395 /* If this variable comes from an outer function, find that
1396 function's saved context. Don't use find_function_data here,
1397 because it might not be in any active function.
1398 FIXME: Is that really supposed to happen?
1399 It does in ObjC at least. */
1400 if (context != current_function_decl)
1401 for (function = outer_function_chain; function; function = function->outer)
1402 if (function->decl == context)
1403 break;
1405 /* If this is a variable-sized object or a structure passed by invisible
1406 reference, with a pseudo to address it, put that pseudo into the stack
1407 if the var is non-local. */
1408 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1409 && GET_CODE (reg) == MEM
1410 && REG_P (XEXP (reg, 0))
1411 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1413 orig_reg = reg = XEXP (reg, 0);
1414 decl_mode = promoted_mode = GET_MODE (reg);
1417 /* If this variable lives in the current function and we don't need to put it
1418 in the stack for the sake of setjmp or the non-locality, try to keep it in
1419 a register until we know we actually need the address. */
1420 can_use_addressof_p
1421 = (function == 0
1422 && ! (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl))
1423 && optimize > 0
1424 /* FIXME make it work for promoted modes too */
1425 && decl_mode == promoted_mode
1426 #ifdef NON_SAVING_SETJMP
1427 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1428 #endif
1431 /* If we can't use ADDRESSOF, make sure we see through one we already
1432 generated. */
1433 if (! can_use_addressof_p
1434 && GET_CODE (reg) == MEM
1435 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1436 reg = XEXP (XEXP (reg, 0), 0);
1438 /* Now we should have a value that resides in one or more pseudo regs. */
1440 if (REG_P (reg))
1442 if (can_use_addressof_p)
1443 gen_mem_addressof (reg, decl, rescan);
1444 else
1445 put_reg_into_stack (function, reg, TREE_TYPE (decl), decl_mode,
1446 0, volatile_p, used_p, false, 0);
1448 /* If this was previously a MEM but we've removed the ADDRESSOF,
1449 set this address into that MEM so we always use the same
1450 rtx for this variable. */
1451 if (orig_reg != reg && GET_CODE (orig_reg) == MEM)
1452 XEXP (orig_reg, 0) = XEXP (reg, 0);
1454 else if (GET_CODE (reg) == CONCAT)
1456 /* A CONCAT contains two pseudos; put them both in the stack.
1457 We do it so they end up consecutive.
1458 We fixup references to the parts only after we fixup references
1459 to the whole CONCAT, lest we do double fixups for the latter
1460 references. */
1461 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1462 tree part_type = lang_hooks.types.type_for_mode (part_mode, 0);
1463 rtx lopart = XEXP (reg, 0);
1464 rtx hipart = XEXP (reg, 1);
1465 #ifdef FRAME_GROWS_DOWNWARD
1466 /* Since part 0 should have a lower address, do it second. */
1467 put_reg_into_stack (function, hipart, part_type, part_mode,
1468 0, volatile_p, false, false, 0);
1469 put_reg_into_stack (function, lopart, part_type, part_mode,
1470 0, volatile_p, false, true, 0);
1471 #else
1472 put_reg_into_stack (function, lopart, part_type, part_mode,
1473 0, volatile_p, false, false, 0);
1474 put_reg_into_stack (function, hipart, part_type, part_mode,
1475 0, volatile_p, false, true, 0);
1476 #endif
1478 /* Change the CONCAT into a combined MEM for both parts. */
1479 PUT_CODE (reg, MEM);
1480 MEM_ATTRS (reg) = 0;
1482 /* set_mem_attributes uses DECL_RTL to avoid re-generating of
1483 already computed alias sets. Here we want to re-generate. */
1484 if (DECL_P (decl))
1485 SET_DECL_RTL (decl, NULL);
1486 set_mem_attributes (reg, decl, 1);
1487 if (DECL_P (decl))
1488 SET_DECL_RTL (decl, reg);
1490 /* The two parts are in memory order already.
1491 Use the lower part's address as ours. */
1492 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1493 /* Prevent sharing of rtl that might lose. */
1494 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1495 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1496 if (used_p && rescan)
1498 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1499 promoted_mode, 0);
1500 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1501 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1504 else
1505 return;
1508 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1509 into the stack frame of FUNCTION (0 means the current function).
1510 TYPE is the user-level data type of the value held in the register.
1511 DECL_MODE is the machine mode of the user-level data type.
1512 ORIGINAL_REGNO must be set if the real regno is not visible in REG.
1513 VOLATILE_P is true if this is for a "volatile" decl.
1514 USED_P is true if this reg might have already been used in an insn.
1515 CONSECUTIVE_P is true if the stack slot assigned to reg must be
1516 consecutive with the previous stack slot. */
1518 static void
1519 put_reg_into_stack (struct function *function, rtx reg, tree type,
1520 enum machine_mode decl_mode, unsigned int original_regno,
1521 bool volatile_p, bool used_p, bool consecutive_p,
1522 htab_t ht)
1524 struct function *func = function ? function : cfun;
1525 enum machine_mode mode = GET_MODE (reg);
1526 unsigned int regno = original_regno;
1527 rtx new = 0;
1529 if (regno == 0)
1530 regno = REGNO (reg);
1532 if (regno < func->x_max_parm_reg)
1534 if (!func->x_parm_reg_stack_loc)
1535 abort ();
1536 new = func->x_parm_reg_stack_loc[regno];
1539 if (new == 0)
1540 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode),
1541 consecutive_p ? -2 : 0, func);
1543 PUT_CODE (reg, MEM);
1544 PUT_MODE (reg, decl_mode);
1545 XEXP (reg, 0) = XEXP (new, 0);
1546 MEM_ATTRS (reg) = 0;
1547 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1548 MEM_VOLATILE_P (reg) = volatile_p;
1550 /* If this is a memory ref that contains aggregate components,
1551 mark it as such for cse and loop optimize. If we are reusing a
1552 previously generated stack slot, then we need to copy the bit in
1553 case it was set for other reasons. For instance, it is set for
1554 __builtin_va_alist. */
1555 if (type)
1557 MEM_SET_IN_STRUCT_P (reg,
1558 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1559 set_mem_alias_set (reg, get_alias_set (type));
1562 if (used_p)
1563 schedule_fixup_var_refs (function, reg, type, mode, ht);
1566 /* Make sure that all refs to the variable, previously made
1567 when it was a register, are fixed up to be valid again.
1568 See function above for meaning of arguments. */
1570 static void
1571 schedule_fixup_var_refs (struct function *function, rtx reg, tree type,
1572 enum machine_mode promoted_mode, htab_t ht)
1574 int unsigned_p = type ? TYPE_UNSIGNED (type) : 0;
1576 if (function != 0)
1578 struct var_refs_queue *temp;
1580 temp = ggc_alloc (sizeof (struct var_refs_queue));
1581 temp->modified = reg;
1582 temp->promoted_mode = promoted_mode;
1583 temp->unsignedp = unsigned_p;
1584 temp->next = function->fixup_var_refs_queue;
1585 function->fixup_var_refs_queue = temp;
1587 else
1588 /* Variable is local; fix it up now. */
1589 fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
1592 static void
1593 fixup_var_refs (rtx var, enum machine_mode promoted_mode, int unsignedp,
1594 rtx may_share, htab_t ht)
1596 tree pending;
1597 rtx first_insn = get_insns ();
1598 struct sequence_stack *stack = seq_stack;
1599 tree rtl_exps = rtl_expr_chain;
1600 int save_volatile_ok = volatile_ok;
1602 /* If there's a hash table, it must record all uses of VAR. */
1603 if (ht)
1605 if (stack != 0)
1606 abort ();
1607 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
1608 may_share);
1609 return;
1612 /* Volatile is valid in MEMs because all we're doing is changing the
1613 address inside. */
1614 volatile_ok = 1;
1615 fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
1616 stack == 0, may_share);
1618 /* Scan all pending sequences too. */
1619 for (; stack; stack = stack->next)
1621 push_to_full_sequence (stack->first, stack->last);
1622 fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
1623 stack->next != 0, may_share);
1624 /* Update bounds of sequence in case we added insns. */
1625 stack->first = get_insns ();
1626 stack->last = get_last_insn ();
1627 end_sequence ();
1630 /* Scan all waiting RTL_EXPRs too. */
1631 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1633 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1634 if (seq != const0_rtx && seq != 0)
1636 push_to_sequence (seq);
1637 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
1638 may_share);
1639 end_sequence ();
1643 volatile_ok = save_volatile_ok;
1646 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement, and X is
1647 some part of an insn. Return a struct fixup_replacement whose OLD
1648 value is equal to X. Allocate a new structure if no such entry exists. */
1650 static struct fixup_replacement *
1651 find_fixup_replacement (struct fixup_replacement **replacements, rtx x)
1653 struct fixup_replacement *p;
1655 /* See if we have already replaced this. */
1656 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1659 if (p == 0)
1661 p = xmalloc (sizeof (struct fixup_replacement));
1662 p->old = x;
1663 p->new = 0;
1664 p->next = *replacements;
1665 *replacements = p;
1668 return p;
1671 /* Scan the insn-chain starting with INSN for refs to VAR and fix them
1672 up. TOPLEVEL is nonzero if this chain is the main chain of insns
1673 for the current function. MAY_SHARE is either a MEM that is not
1674 to be unshared or a list of them. */
1676 static void
1677 fixup_var_refs_insns (rtx insn, rtx var, enum machine_mode promoted_mode,
1678 int unsignedp, int toplevel, rtx may_share)
1680 while (insn)
1682 /* fixup_var_refs_insn might modify insn, so save its next
1683 pointer now. */
1684 rtx next = NEXT_INSN (insn);
1686 if (INSN_P (insn))
1687 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
1688 may_share);
1690 insn = next;
1694 /* Look up the insns which reference VAR in HT and fix them up. Other
1695 arguments are the same as fixup_var_refs_insns. */
1697 static void
1698 fixup_var_refs_insns_with_hash (htab_t ht, rtx var, enum machine_mode promoted_mode,
1699 int unsignedp, rtx may_share)
1701 struct insns_for_mem_entry tmp;
1702 struct insns_for_mem_entry *ime;
1703 rtx insn_list;
1705 tmp.key = var;
1706 ime = htab_find (ht, &tmp);
1707 for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
1708 if (INSN_P (XEXP (insn_list, 0)))
1709 fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
1710 unsignedp, 1, may_share);
1714 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1715 the insn under examination, VAR is the variable to fix up
1716 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1717 TOPLEVEL is nonzero if this is the main insn chain for this
1718 function. */
1720 static void
1721 fixup_var_refs_insn (rtx insn, rtx var, enum machine_mode promoted_mode,
1722 int unsignedp, int toplevel, rtx no_share)
1724 rtx call_dest = 0;
1725 rtx set, prev, prev_set;
1726 rtx note;
1728 /* Remember the notes in case we delete the insn. */
1729 note = REG_NOTES (insn);
1731 /* If this is a CLOBBER of VAR, delete it.
1733 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1734 and REG_RETVAL notes too. */
1735 if (GET_CODE (PATTERN (insn)) == CLOBBER
1736 && (XEXP (PATTERN (insn), 0) == var
1737 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1738 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1739 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1741 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1742 /* The REG_LIBCALL note will go away since we are going to
1743 turn INSN into a NOTE, so just delete the
1744 corresponding REG_RETVAL note. */
1745 remove_note (XEXP (note, 0),
1746 find_reg_note (XEXP (note, 0), REG_RETVAL,
1747 NULL_RTX));
1749 delete_insn (insn);
1752 /* The insn to load VAR from a home in the arglist
1753 is now a no-op. When we see it, just delete it.
1754 Similarly if this is storing VAR from a register from which
1755 it was loaded in the previous insn. This will occur
1756 when an ADDRESSOF was made for an arglist slot. */
1757 else if (toplevel
1758 && (set = single_set (insn)) != 0
1759 && SET_DEST (set) == var
1760 /* If this represents the result of an insn group,
1761 don't delete the insn. */
1762 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1763 && (rtx_equal_p (SET_SRC (set), var)
1764 || (REG_P (SET_SRC (set))
1765 && (prev = prev_nonnote_insn (insn)) != 0
1766 && (prev_set = single_set (prev)) != 0
1767 && SET_DEST (prev_set) == SET_SRC (set)
1768 && rtx_equal_p (SET_SRC (prev_set), var))))
1770 delete_insn (insn);
1772 else
1774 struct fixup_replacement *replacements = 0;
1775 rtx next_insn = NEXT_INSN (insn);
1777 if (SMALL_REGISTER_CLASSES)
1779 /* If the insn that copies the results of a CALL_INSN
1780 into a pseudo now references VAR, we have to use an
1781 intermediate pseudo since we want the life of the
1782 return value register to be only a single insn.
1784 If we don't use an intermediate pseudo, such things as
1785 address computations to make the address of VAR valid
1786 if it is not can be placed between the CALL_INSN and INSN.
1788 To make sure this doesn't happen, we record the destination
1789 of the CALL_INSN and see if the next insn uses both that
1790 and VAR. */
1792 if (call_dest != 0 && GET_CODE (insn) == INSN
1793 && reg_mentioned_p (var, PATTERN (insn))
1794 && reg_mentioned_p (call_dest, PATTERN (insn)))
1796 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1798 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1800 PATTERN (insn) = replace_rtx (PATTERN (insn),
1801 call_dest, temp);
1804 if (GET_CODE (insn) == CALL_INSN
1805 && GET_CODE (PATTERN (insn)) == SET)
1806 call_dest = SET_DEST (PATTERN (insn));
1807 else if (GET_CODE (insn) == CALL_INSN
1808 && GET_CODE (PATTERN (insn)) == PARALLEL
1809 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1810 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1811 else
1812 call_dest = 0;
1815 /* See if we have to do anything to INSN now that VAR is in
1816 memory. If it needs to be loaded into a pseudo, use a single
1817 pseudo for the entire insn in case there is a MATCH_DUP
1818 between two operands. We pass a pointer to the head of
1819 a list of struct fixup_replacements. If fixup_var_refs_1
1820 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1821 it will record them in this list.
1823 If it allocated a pseudo for any replacement, we copy into
1824 it here. */
1826 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1827 &replacements, no_share);
1829 /* If this is last_parm_insn, and any instructions were output
1830 after it to fix it up, then we must set last_parm_insn to
1831 the last such instruction emitted. */
1832 if (insn == last_parm_insn)
1833 last_parm_insn = PREV_INSN (next_insn);
1835 while (replacements)
1837 struct fixup_replacement *next;
1839 if (REG_P (replacements->new))
1841 rtx insert_before;
1842 rtx seq;
1844 /* OLD might be a (subreg (mem)). */
1845 if (GET_CODE (replacements->old) == SUBREG)
1846 replacements->old
1847 = fixup_memory_subreg (replacements->old, insn,
1848 promoted_mode, 0);
1849 else
1850 replacements->old
1851 = fixup_stack_1 (replacements->old, insn);
1853 insert_before = insn;
1855 /* If we are changing the mode, do a conversion.
1856 This might be wasteful, but combine.c will
1857 eliminate much of the waste. */
1859 if (GET_MODE (replacements->new)
1860 != GET_MODE (replacements->old))
1862 start_sequence ();
1863 convert_move (replacements->new,
1864 replacements->old, unsignedp);
1865 seq = get_insns ();
1866 end_sequence ();
1868 else
1869 seq = gen_move_insn (replacements->new,
1870 replacements->old);
1872 emit_insn_before (seq, insert_before);
1875 next = replacements->next;
1876 free (replacements);
1877 replacements = next;
1881 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1882 But don't touch other insns referred to by reg-notes;
1883 we will get them elsewhere. */
1884 while (note)
1886 if (GET_CODE (note) != INSN_LIST)
1887 XEXP (note, 0)
1888 = walk_fixup_memory_subreg (XEXP (note, 0), insn, var,
1889 promoted_mode, 1);
1890 note = XEXP (note, 1);
1894 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1895 See if the rtx expression at *LOC in INSN needs to be changed.
1897 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1898 contain a list of original rtx's and replacements. If we find that we need
1899 to modify this insn by replacing a memory reference with a pseudo or by
1900 making a new MEM to implement a SUBREG, we consult that list to see if
1901 we have already chosen a replacement. If none has already been allocated,
1902 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1903 or the SUBREG, as appropriate, to the pseudo. */
1905 static void
1906 fixup_var_refs_1 (rtx var, enum machine_mode promoted_mode, rtx *loc, rtx insn,
1907 struct fixup_replacement **replacements, rtx no_share)
1909 int i;
1910 rtx x = *loc;
1911 RTX_CODE code = GET_CODE (x);
1912 const char *fmt;
1913 rtx tem, tem1;
1914 struct fixup_replacement *replacement;
1916 switch (code)
1918 case ADDRESSOF:
1919 if (XEXP (x, 0) == var)
1921 /* Prevent sharing of rtl that might lose. */
1922 rtx sub = copy_rtx (XEXP (var, 0));
1924 if (! validate_change (insn, loc, sub, 0))
1926 rtx y = gen_reg_rtx (GET_MODE (sub));
1927 rtx seq, new_insn;
1929 /* We should be able to replace with a register or all is lost.
1930 Note that we can't use validate_change to verify this, since
1931 we do not take care of replacing all dups simultaneously. */
1932 if (! validate_replace_rtx (*loc, y, insn))
1933 abort ();
1935 /* Careful! First try to recognize a direct move of the
1936 value, mimicking how things are done in gen_reload wrt
1937 PLUS. Consider what happens when insn is a conditional
1938 move instruction and addsi3 clobbers flags. */
1940 start_sequence ();
1941 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1942 seq = get_insns ();
1943 end_sequence ();
1945 if (recog_memoized (new_insn) < 0)
1947 /* That failed. Fall back on force_operand and hope. */
1949 start_sequence ();
1950 sub = force_operand (sub, y);
1951 if (sub != y)
1952 emit_insn (gen_move_insn (y, sub));
1953 seq = get_insns ();
1954 end_sequence ();
1957 #ifdef HAVE_cc0
1958 /* Don't separate setter from user. */
1959 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1960 insn = PREV_INSN (insn);
1961 #endif
1963 emit_insn_before (seq, insn);
1966 return;
1968 case MEM:
1969 if (var == x)
1971 /* If we already have a replacement, use it. Otherwise,
1972 try to fix up this address in case it is invalid. */
1974 replacement = find_fixup_replacement (replacements, var);
1975 if (replacement->new)
1977 *loc = replacement->new;
1978 return;
1981 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1983 /* Unless we are forcing memory to register or we changed the mode,
1984 we can leave things the way they are if the insn is valid. */
1986 INSN_CODE (insn) = -1;
1987 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1988 && recog_memoized (insn) >= 0)
1989 return;
1991 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1992 return;
1995 /* If X contains VAR, we need to unshare it here so that we update
1996 each occurrence separately. But all identical MEMs in one insn
1997 must be replaced with the same rtx because of the possibility of
1998 MATCH_DUPs. */
2000 if (reg_mentioned_p (var, x))
2002 replacement = find_fixup_replacement (replacements, x);
2003 if (replacement->new == 0)
2004 replacement->new = copy_most_rtx (x, no_share);
2006 *loc = x = replacement->new;
2007 code = GET_CODE (x);
2009 break;
2011 case REG:
2012 case CC0:
2013 case PC:
2014 case CONST_INT:
2015 case CONST:
2016 case SYMBOL_REF:
2017 case LABEL_REF:
2018 case CONST_DOUBLE:
2019 case CONST_VECTOR:
2020 return;
2022 case SIGN_EXTRACT:
2023 case ZERO_EXTRACT:
2024 /* Note that in some cases those types of expressions are altered
2025 by optimize_bit_field, and do not survive to get here. */
2026 if (XEXP (x, 0) == var
2027 || (GET_CODE (XEXP (x, 0)) == SUBREG
2028 && SUBREG_REG (XEXP (x, 0)) == var))
2030 /* Get TEM as a valid MEM in the mode presently in the insn.
2032 We don't worry about the possibility of MATCH_DUP here; it
2033 is highly unlikely and would be tricky to handle. */
2035 tem = XEXP (x, 0);
2036 if (GET_CODE (tem) == SUBREG)
2038 if (GET_MODE_BITSIZE (GET_MODE (tem))
2039 > GET_MODE_BITSIZE (GET_MODE (var)))
2041 replacement = find_fixup_replacement (replacements, var);
2042 if (replacement->new == 0)
2043 replacement->new = gen_reg_rtx (GET_MODE (var));
2044 SUBREG_REG (tem) = replacement->new;
2046 /* The following code works only if we have a MEM, so we
2047 need to handle the subreg here. We directly substitute
2048 it assuming that a subreg must be OK here. We already
2049 scheduled a replacement to copy the mem into the
2050 subreg. */
2051 XEXP (x, 0) = tem;
2052 return;
2054 else
2055 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2057 else
2058 tem = fixup_stack_1 (tem, insn);
2060 /* Unless we want to load from memory, get TEM into the proper mode
2061 for an extract from memory. This can only be done if the
2062 extract is at a constant position and length. */
2064 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2065 && GET_CODE (XEXP (x, 2)) == CONST_INT
2066 && ! mode_dependent_address_p (XEXP (tem, 0))
2067 && ! MEM_VOLATILE_P (tem))
2069 enum machine_mode wanted_mode = VOIDmode;
2070 enum machine_mode is_mode = GET_MODE (tem);
2071 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2073 if (GET_CODE (x) == ZERO_EXTRACT)
2075 enum machine_mode new_mode
2076 = mode_for_extraction (EP_extzv, 1);
2077 if (new_mode != MAX_MACHINE_MODE)
2078 wanted_mode = new_mode;
2080 else if (GET_CODE (x) == SIGN_EXTRACT)
2082 enum machine_mode new_mode
2083 = mode_for_extraction (EP_extv, 1);
2084 if (new_mode != MAX_MACHINE_MODE)
2085 wanted_mode = new_mode;
2088 /* If we have a narrower mode, we can do something. */
2089 if (wanted_mode != VOIDmode
2090 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2092 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2093 rtx old_pos = XEXP (x, 2);
2094 rtx newmem;
2096 /* If the bytes and bits are counted differently, we
2097 must adjust the offset. */
2098 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2099 offset = (GET_MODE_SIZE (is_mode)
2100 - GET_MODE_SIZE (wanted_mode) - offset);
2102 pos %= GET_MODE_BITSIZE (wanted_mode);
2104 newmem = adjust_address_nv (tem, wanted_mode, offset);
2106 /* Make the change and see if the insn remains valid. */
2107 INSN_CODE (insn) = -1;
2108 XEXP (x, 0) = newmem;
2109 XEXP (x, 2) = GEN_INT (pos);
2111 if (recog_memoized (insn) >= 0)
2112 return;
2114 /* Otherwise, restore old position. XEXP (x, 0) will be
2115 restored later. */
2116 XEXP (x, 2) = old_pos;
2120 /* If we get here, the bitfield extract insn can't accept a memory
2121 reference. Copy the input into a register. */
2123 tem1 = gen_reg_rtx (GET_MODE (tem));
2124 emit_insn_before (gen_move_insn (tem1, tem), insn);
2125 XEXP (x, 0) = tem1;
2126 return;
2128 break;
2130 case SUBREG:
2131 if (SUBREG_REG (x) == var)
2133 /* If this is a special SUBREG made because VAR was promoted
2134 from a wider mode, replace it with VAR and call ourself
2135 recursively, this time saying that the object previously
2136 had its current mode (by virtue of the SUBREG). */
2138 if (SUBREG_PROMOTED_VAR_P (x))
2140 *loc = var;
2141 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2142 no_share);
2143 return;
2146 /* If this SUBREG makes VAR wider, it has become a paradoxical
2147 SUBREG with VAR in memory, but these aren't allowed at this
2148 stage of the compilation. So load VAR into a pseudo and take
2149 a SUBREG of that pseudo. */
2150 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2152 replacement = find_fixup_replacement (replacements, var);
2153 if (replacement->new == 0)
2154 replacement->new = gen_reg_rtx (promoted_mode);
2155 SUBREG_REG (x) = replacement->new;
2156 return;
2159 /* See if we have already found a replacement for this SUBREG.
2160 If so, use it. Otherwise, make a MEM and see if the insn
2161 is recognized. If not, or if we should force MEM into a register,
2162 make a pseudo for this SUBREG. */
2163 replacement = find_fixup_replacement (replacements, x);
2164 if (replacement->new)
2166 enum machine_mode mode = GET_MODE (x);
2167 *loc = replacement->new;
2169 /* Careful! We may have just replaced a SUBREG by a MEM, which
2170 means that the insn may have become invalid again. We can't
2171 in this case make a new replacement since we already have one
2172 and we must deal with MATCH_DUPs. */
2173 if (GET_CODE (replacement->new) == MEM)
2175 INSN_CODE (insn) = -1;
2176 if (recog_memoized (insn) >= 0)
2177 return;
2179 fixup_var_refs_1 (replacement->new, mode, &PATTERN (insn),
2180 insn, replacements, no_share);
2183 return;
2186 replacement->new = *loc = fixup_memory_subreg (x, insn,
2187 promoted_mode, 0);
2189 INSN_CODE (insn) = -1;
2190 if (! flag_force_mem && recog_memoized (insn) >= 0)
2191 return;
2193 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2194 return;
2196 break;
2198 case SET:
2199 /* First do special simplification of bit-field references. */
2200 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2201 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2202 optimize_bit_field (x, insn, 0);
2203 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2204 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2205 optimize_bit_field (x, insn, 0);
2207 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2208 into a register and then store it back out. */
2209 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2210 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2211 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2212 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2213 > GET_MODE_SIZE (GET_MODE (var))))
2215 replacement = find_fixup_replacement (replacements, var);
2216 if (replacement->new == 0)
2217 replacement->new = gen_reg_rtx (GET_MODE (var));
2219 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2220 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2223 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2224 insn into a pseudo and store the low part of the pseudo into VAR. */
2225 if (GET_CODE (SET_DEST (x)) == SUBREG
2226 && SUBREG_REG (SET_DEST (x)) == var
2227 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2228 > GET_MODE_SIZE (GET_MODE (var))))
2230 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2231 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2232 tem)),
2233 insn);
2234 break;
2238 rtx dest = SET_DEST (x);
2239 rtx src = SET_SRC (x);
2240 rtx outerdest = dest;
2242 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2243 || GET_CODE (dest) == SIGN_EXTRACT
2244 || GET_CODE (dest) == ZERO_EXTRACT)
2245 dest = XEXP (dest, 0);
2247 if (GET_CODE (src) == SUBREG)
2248 src = SUBREG_REG (src);
2250 /* If VAR does not appear at the top level of the SET
2251 just scan the lower levels of the tree. */
2253 if (src != var && dest != var)
2254 break;
2256 /* We will need to rerecognize this insn. */
2257 INSN_CODE (insn) = -1;
2259 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2260 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2262 /* Since this case will return, ensure we fixup all the
2263 operands here. */
2264 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2265 insn, replacements, no_share);
2266 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2267 insn, replacements, no_share);
2268 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2269 insn, replacements, no_share);
2271 tem = XEXP (outerdest, 0);
2273 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2274 that may appear inside a ZERO_EXTRACT.
2275 This was legitimate when the MEM was a REG. */
2276 if (GET_CODE (tem) == SUBREG
2277 && SUBREG_REG (tem) == var)
2278 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2279 else
2280 tem = fixup_stack_1 (tem, insn);
2282 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2283 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2284 && ! mode_dependent_address_p (XEXP (tem, 0))
2285 && ! MEM_VOLATILE_P (tem))
2287 enum machine_mode wanted_mode;
2288 enum machine_mode is_mode = GET_MODE (tem);
2289 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2291 wanted_mode = mode_for_extraction (EP_insv, 0);
2293 /* If we have a narrower mode, we can do something. */
2294 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2296 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2297 rtx old_pos = XEXP (outerdest, 2);
2298 rtx newmem;
2300 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2301 offset = (GET_MODE_SIZE (is_mode)
2302 - GET_MODE_SIZE (wanted_mode) - offset);
2304 pos %= GET_MODE_BITSIZE (wanted_mode);
2306 newmem = adjust_address_nv (tem, wanted_mode, offset);
2308 /* Make the change and see if the insn remains valid. */
2309 INSN_CODE (insn) = -1;
2310 XEXP (outerdest, 0) = newmem;
2311 XEXP (outerdest, 2) = GEN_INT (pos);
2313 if (recog_memoized (insn) >= 0)
2314 return;
2316 /* Otherwise, restore the old position. XEXP (outerdest, 0) will be
2317 restored later. */
2318 XEXP (outerdest, 2) = old_pos;
2322 /* If we get here, the bit-field store doesn't allow memory
2323 or isn't located at a constant position. Load the value into
2324 a register, do the store, and put it back into memory. */
2326 tem1 = gen_reg_rtx (GET_MODE (tem));
2327 emit_insn_before (gen_move_insn (tem1, tem), insn);
2328 emit_insn_after (gen_move_insn (tem, tem1), insn);
2329 XEXP (outerdest, 0) = tem1;
2330 return;
2333 /* STRICT_LOW_PART is a no-op on memory references
2334 and it can cause combinations to be unrecognizable,
2335 so eliminate it. */
2337 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2338 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2340 /* A valid insn to copy VAR into or out of a register
2341 must be left alone, to avoid an infinite loop here.
2342 If the reference to VAR is by a subreg, fix that up,
2343 since SUBREG is not valid for a memref.
2344 Also fix up the address of the stack slot.
2346 Note that we must not try to recognize the insn until
2347 after we know that we have valid addresses and no
2348 (subreg (mem ...) ...) constructs, since these interfere
2349 with determining the validity of the insn. */
2351 if ((SET_SRC (x) == var
2352 || (GET_CODE (SET_SRC (x)) == SUBREG
2353 && SUBREG_REG (SET_SRC (x)) == var))
2354 && (REG_P (SET_DEST (x))
2355 || (GET_CODE (SET_DEST (x)) == SUBREG
2356 && REG_P (SUBREG_REG (SET_DEST (x)))))
2357 && GET_MODE (var) == promoted_mode
2358 && x == single_set (insn))
2360 rtx pat, last;
2362 if (GET_CODE (SET_SRC (x)) == SUBREG
2363 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2364 > GET_MODE_SIZE (GET_MODE (var))))
2366 /* This (subreg VAR) is now a paradoxical subreg. We need
2367 to replace VAR instead of the subreg. */
2368 replacement = find_fixup_replacement (replacements, var);
2369 if (replacement->new == NULL_RTX)
2370 replacement->new = gen_reg_rtx (GET_MODE (var));
2371 SUBREG_REG (SET_SRC (x)) = replacement->new;
2373 else
2375 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2376 if (replacement->new)
2377 SET_SRC (x) = replacement->new;
2378 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2379 SET_SRC (x) = replacement->new
2380 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2382 else
2383 SET_SRC (x) = replacement->new
2384 = fixup_stack_1 (SET_SRC (x), insn);
2387 if (recog_memoized (insn) >= 0)
2388 return;
2390 /* INSN is not valid, but we know that we want to
2391 copy SET_SRC (x) to SET_DEST (x) in some way. So
2392 we generate the move and see whether it requires more
2393 than one insn. If it does, we emit those insns and
2394 delete INSN. Otherwise, we can just replace the pattern
2395 of INSN; we have already verified above that INSN has
2396 no other function than to do X. */
2398 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2399 if (NEXT_INSN (pat) != NULL_RTX)
2401 last = emit_insn_before (pat, insn);
2403 /* INSN might have REG_RETVAL or other important notes, so
2404 we need to store the pattern of the last insn in the
2405 sequence into INSN similarly to the normal case. LAST
2406 should not have REG_NOTES, but we allow them if INSN has
2407 no REG_NOTES. */
2408 if (REG_NOTES (last) && REG_NOTES (insn))
2409 abort ();
2410 if (REG_NOTES (last))
2411 REG_NOTES (insn) = REG_NOTES (last);
2412 PATTERN (insn) = PATTERN (last);
2414 delete_insn (last);
2416 else
2417 PATTERN (insn) = PATTERN (pat);
2419 return;
2422 if ((SET_DEST (x) == var
2423 || (GET_CODE (SET_DEST (x)) == SUBREG
2424 && SUBREG_REG (SET_DEST (x)) == var))
2425 && (REG_P (SET_SRC (x))
2426 || (GET_CODE (SET_SRC (x)) == SUBREG
2427 && REG_P (SUBREG_REG (SET_SRC (x)))))
2428 && GET_MODE (var) == promoted_mode
2429 && x == single_set (insn))
2431 rtx pat, last;
2433 if (GET_CODE (SET_DEST (x)) == SUBREG)
2434 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2435 promoted_mode, 0);
2436 else
2437 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2439 if (recog_memoized (insn) >= 0)
2440 return;
2442 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2443 if (NEXT_INSN (pat) != NULL_RTX)
2445 last = emit_insn_before (pat, insn);
2447 /* INSN might have REG_RETVAL or other important notes, so
2448 we need to store the pattern of the last insn in the
2449 sequence into INSN similarly to the normal case. LAST
2450 should not have REG_NOTES, but we allow them if INSN has
2451 no REG_NOTES. */
2452 if (REG_NOTES (last) && REG_NOTES (insn))
2453 abort ();
2454 if (REG_NOTES (last))
2455 REG_NOTES (insn) = REG_NOTES (last);
2456 PATTERN (insn) = PATTERN (last);
2458 delete_insn (last);
2460 else
2461 PATTERN (insn) = PATTERN (pat);
2463 return;
2466 /* Otherwise, storing into VAR must be handled specially
2467 by storing into a temporary and copying that into VAR
2468 with a new insn after this one. Note that this case
2469 will be used when storing into a promoted scalar since
2470 the insn will now have different modes on the input
2471 and output and hence will be invalid (except for the case
2472 of setting it to a constant, which does not need any
2473 change if it is valid). We generate extra code in that case,
2474 but combine.c will eliminate it. */
2476 if (dest == var)
2478 rtx temp;
2479 rtx fixeddest = SET_DEST (x);
2480 enum machine_mode temp_mode;
2482 /* A STRICT_LOW_PART around a MEM can be discarded. */
2483 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2484 fixeddest = XEXP (fixeddest, 0);
2485 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2486 if (GET_CODE (fixeddest) == SUBREG)
2488 fixeddest = fixup_memory_subreg (fixeddest, insn,
2489 promoted_mode, 0);
2490 temp_mode = GET_MODE (fixeddest);
2492 else
2494 fixeddest = fixup_stack_1 (fixeddest, insn);
2495 temp_mode = promoted_mode;
2498 temp = gen_reg_rtx (temp_mode);
2500 emit_insn_after (gen_move_insn (fixeddest,
2501 gen_lowpart (GET_MODE (fixeddest),
2502 temp)),
2503 insn);
2505 SET_DEST (x) = temp;
2509 default:
2510 break;
2513 /* Nothing special about this RTX; fix its operands. */
2515 fmt = GET_RTX_FORMAT (code);
2516 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2518 if (fmt[i] == 'e')
2519 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2520 no_share);
2521 else if (fmt[i] == 'E')
2523 int j;
2524 for (j = 0; j < XVECLEN (x, i); j++)
2525 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2526 insn, replacements, no_share);
2531 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2532 The REG was placed on the stack, so X now has the form (SUBREG:m1
2533 (MEM:m2 ...)).
2535 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2536 must be emitted to compute NEWADDR, put them before INSN.
2538 UNCRITICAL nonzero means accept paradoxical subregs.
2539 This is used for subregs found inside REG_NOTES. */
2541 static rtx
2542 fixup_memory_subreg (rtx x, rtx insn, enum machine_mode promoted_mode, int uncritical)
2544 int offset;
2545 rtx mem = SUBREG_REG (x);
2546 rtx addr = XEXP (mem, 0);
2547 enum machine_mode mode = GET_MODE (x);
2548 rtx result, seq;
2550 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2551 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2552 abort ();
2554 offset = SUBREG_BYTE (x);
2555 if (BYTES_BIG_ENDIAN)
2556 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2557 the offset so that it points to the right location within the
2558 MEM. */
2559 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2561 if (!flag_force_addr
2562 && memory_address_p (mode, plus_constant (addr, offset)))
2563 /* Shortcut if no insns need be emitted. */
2564 return adjust_address (mem, mode, offset);
2566 start_sequence ();
2567 result = adjust_address (mem, mode, offset);
2568 seq = get_insns ();
2569 end_sequence ();
2571 emit_insn_before (seq, insn);
2572 return result;
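/* As an illustration (hypothetical rtl; little-endian target and SUBREG_BYTE
   zero assumed, so no endian adjustment applies): when the adjusted address
   is already valid, the transform above is simply

       (subreg:QI (mem:SI (reg:SI addr)) 0)  -->  (mem:QI (reg:SI addr))

   and no new insns are emitted; only when the new address is invalid, or
   -fforce-addr is in effect, are the insns needed to form it emitted
   before INSN.  */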
2575 /* Do fixup_memory_subreg on all (SUBREG (VAR) ...) contained in X.
2576 VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
2577 Replace subexpressions of X in place.
2578 If X itself is a (SUBREG (VAR) ...), return the replacement expression.
2579 Otherwise return X, with its contents possibly altered.
2581 INSN and UNCRITICAL are as for fixup_memory_subreg. */
2583 static rtx
2584 walk_fixup_memory_subreg (rtx x, rtx insn, rtx var,
2585 enum machine_mode promoted_mode, int uncritical)
2587 enum rtx_code code;
2588 const char *fmt;
2589 int i;
2591 if (x == 0)
2592 return 0;
2594 code = GET_CODE (x);
2596 if (code == SUBREG && SUBREG_REG (x) == var)
2597 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2599 /* Nothing special about this RTX; fix its operands. */
2601 fmt = GET_RTX_FORMAT (code);
2602 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2604 if (fmt[i] == 'e')
2605 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, var,
2606 promoted_mode, uncritical);
2607 else if (fmt[i] == 'E')
2609 int j;
2610 for (j = 0; j < XVECLEN (x, i); j++)
2611 XVECEXP (x, i, j)
2612 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, var,
2613 promoted_mode, uncritical);
2616 return x;
2619 /* For each memory ref within X, if it refers to a stack slot
2620 with an out of range displacement, put the address in a temp register
2621 (emitting new insns before INSN to load these registers)
2622 and alter the memory ref to use that register.
2623 Replace each such MEM rtx with a copy, to avoid clobberage. */
2625 static rtx
2626 fixup_stack_1 (rtx x, rtx insn)
2628 int i;
2629 RTX_CODE code = GET_CODE (x);
2630 const char *fmt;
2632 if (code == MEM)
2634 rtx ad = XEXP (x, 0);
2635 /* If we have the address of a stack slot but it's not valid
2636 (displacement is too large), compute the sum in a register. */
2637 if (GET_CODE (ad) == PLUS
2638 && REG_P (XEXP (ad, 0))
2639 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2640 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2641 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2642 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2643 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2644 #endif
2645 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2646 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2647 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2648 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2650 rtx temp, seq;
2651 if (memory_address_p (GET_MODE (x), ad))
2652 return x;
2654 start_sequence ();
2655 temp = copy_to_reg (ad);
2656 seq = get_insns ();
2657 end_sequence ();
2658 emit_insn_before (seq, insn);
2659 return replace_equiv_address (x, temp);
2661 return x;
2664 fmt = GET_RTX_FORMAT (code);
2665 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2667 if (fmt[i] == 'e')
2668 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2669 else if (fmt[i] == 'E')
2671 int j;
2672 for (j = 0; j < XVECLEN (x, i); j++)
2673 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2676 return x;
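/* As an illustration (hypothetical displacement and target): fixup_stack_1
   only intervenes when a frame address has an out-of-range displacement.
   On a target whose memory operands accept only small constant offsets, a
   reference such as

       (mem:SI (plus:SI (reg:SI frame_pointer) (const_int 40000)))

   has its address copied into a fresh pseudo before INSN and the MEM is
   rewritten to use that pseudo; an in-range reference is returned
   unchanged.  */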
2679 /* Optimization: a bit-field instruction whose field
2680 happens to be a byte or halfword in memory
2681 can be changed to a move instruction.
2683 We call here when INSN is an insn to examine or store into a bit-field.
2684 BODY is the SET-rtx to be altered.
2686 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2687 (Currently this is called only from function.c, and EQUIV_MEM
2688 is always 0.) */
2690 static void
2691 optimize_bit_field (rtx body, rtx insn, rtx *equiv_mem)
2693 rtx bitfield;
2694 int destflag;
2695 rtx seq = 0;
2696 enum machine_mode mode;
2698 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2699 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2700 bitfield = SET_DEST (body), destflag = 1;
2701 else
2702 bitfield = SET_SRC (body), destflag = 0;
2704 /* First check that the field being stored has constant size and position
2705 and is in fact a byte or halfword suitably aligned. */
2707 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2708 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2709 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2710 != BLKmode)
2711 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2713 rtx memref = 0;
2715 /* Now check that the containing word is memory, not a register,
2716 and that it is safe to change the machine mode. */
2718 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2719 memref = XEXP (bitfield, 0);
2720 else if (REG_P (XEXP (bitfield, 0))
2721 && equiv_mem != 0)
2722 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2723 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2724 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2725 memref = SUBREG_REG (XEXP (bitfield, 0));
2726 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2727 && equiv_mem != 0
2728 && REG_P (SUBREG_REG (XEXP (bitfield, 0))))
2729 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2731 if (memref
2732 && ! mode_dependent_address_p (XEXP (memref, 0))
2733 && ! MEM_VOLATILE_P (memref))
2735 /* Now adjust the address, first for any subreg'ing
2736 that we are now getting rid of,
2737 and then for which byte of the word is wanted. */
2739 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2740 rtx insns;
2742 /* Adjust OFFSET to count bits from low-address byte. */
2743 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2744 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2745 - offset - INTVAL (XEXP (bitfield, 1)));
2747 /* Adjust OFFSET to count bytes from low-address byte. */
2748 offset /= BITS_PER_UNIT;
2749 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2751 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2752 / UNITS_PER_WORD) * UNITS_PER_WORD;
2753 if (BYTES_BIG_ENDIAN)
2754 offset -= (MIN (UNITS_PER_WORD,
2755 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2756 - MIN (UNITS_PER_WORD,
2757 GET_MODE_SIZE (GET_MODE (memref))));
2760 start_sequence ();
2761 memref = adjust_address (memref, mode, offset);
2762 insns = get_insns ();
2763 end_sequence ();
2764 emit_insn_before (insns, insn);
2766 /* Store this memory reference where
2767 we found the bit field reference. */
2769 if (destflag)
2771 validate_change (insn, &SET_DEST (body), memref, 1);
2772 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2774 rtx src = SET_SRC (body);
2775 while (GET_CODE (src) == SUBREG
2776 && SUBREG_BYTE (src) == 0)
2777 src = SUBREG_REG (src);
2778 if (GET_MODE (src) != GET_MODE (memref))
2779 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2780 validate_change (insn, &SET_SRC (body), src, 1);
2782 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2783 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2784 /* This shouldn't happen because anything that didn't have
2785 one of these modes should have got converted explicitly
2786 and then referenced through a subreg.
2787 This is so because the original bit-field was
2788 handled by agg_mode and so its tree structure had
2789 the same mode that memref now has. */
2790 abort ();
2792 else
2794 rtx dest = SET_DEST (body);
2796 while (GET_CODE (dest) == SUBREG
2797 && SUBREG_BYTE (dest) == 0
2798 && (GET_MODE_CLASS (GET_MODE (dest))
2799 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2800 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2801 <= UNITS_PER_WORD))
2802 dest = SUBREG_REG (dest);
2804 validate_change (insn, &SET_DEST (body), dest, 1);
2806 if (GET_MODE (dest) == GET_MODE (memref))
2807 validate_change (insn, &SET_SRC (body), memref, 1);
2808 else
2810 /* Convert the mem ref to the destination mode. */
2811 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2813 start_sequence ();
2814 convert_move (newreg, memref,
2815 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2816 seq = get_insns ();
2817 end_sequence ();
2819 validate_change (insn, &SET_SRC (body), newreg, 1);
2823 /* See if we can convert this extraction or insertion into
2824 a simple move insn. We might not be able to do so if this
2825 was, for example, part of a PARALLEL.
2827 If we succeed, write out any needed conversions. If we fail,
2828 it is hard to guess why we failed, so don't do anything
2829 special; just let the optimization be suppressed. */
2831 if (apply_change_group () && seq)
2832 emit_insn_before (seq, insn);
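/* As an illustration (hypothetical rtl; a target where byte and bit
   numbering agree is assumed): an aligned whole-byte extraction such as

       (set (reg:SI r) (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8)))

   satisfies the tests above (constant size and position, position a
   multiple of the size, QImode exists) and is narrowed to a plain move
   from (mem:QI addr+1), with a zero-extending conversion emitted when the
   destination mode differs from QImode.  */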
2837 /* These routines are responsible for converting virtual register references
2838 to the actual hard register references once RTL generation is complete.
2840 The following five variables are used for communication between the
2841 routines. They contain the offsets of the virtual registers from their
2842 respective hard registers. */
2844 static int in_arg_offset;
2845 static int var_offset;
2846 static int dynamic_offset;
2847 static int out_arg_offset;
2848 static int cfa_offset;
2850 /* On most machines, the stack pointer register is equivalent to the bottom
2851 of the stack. */
2853 #ifndef STACK_POINTER_OFFSET
2854 #define STACK_POINTER_OFFSET 0
2855 #endif
2857 /* If not defined, pick an appropriate default for the offset of dynamically
2858 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2859 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2861 #ifndef STACK_DYNAMIC_OFFSET
2863 /* The bottom of the stack points to the actual arguments. If
2864 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2865 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2866 stack space for register parameters is not pushed by the caller, but
2867 rather part of the fixed stack areas and hence not included in
2868 `current_function_outgoing_args_size'. Nevertheless, we must allow
2869 for it when allocating stack dynamic objects. */
2871 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2872 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2873 ((ACCUMULATE_OUTGOING_ARGS \
2874 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2875 + (STACK_POINTER_OFFSET)) \
2877 #else
2878 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2879 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2880 + (STACK_POINTER_OFFSET))
2881 #endif
2882 #endif
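/* Worked example (hypothetical numbers): with ACCUMULATE_OUTGOING_ARGS
   nonzero, 32 bytes of outgoing argument space, REG_PARM_STACK_SPACE not
   defined and a STACK_POINTER_OFFSET of 0, the default above yields
   STACK_DYNAMIC_OFFSET (fndecl) == 32, i.e. dynamically allocated memory
   starts 32 bytes beyond the stack pointer, above the outgoing
   arguments.  */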
2884 /* On most machines, the CFA coincides with the first incoming parm. */
2886 #ifndef ARG_POINTER_CFA_OFFSET
2887 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2888 #endif
2890 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
2891 had its address taken. DECL is the decl or SAVE_EXPR for the
2892 object stored in the register, for later use if we do need to force
2893 REG into the stack. REG is overwritten by the MEM like in
2894 put_reg_into_stack. RESCAN is true if previously emitted
2895 instructions must be rescanned and modified now that the REG has
2896 been transformed. */
2898 rtx
2899 gen_mem_addressof (rtx reg, tree decl, int rescan)
2901 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2902 REGNO (reg), decl);
2904 /* Calculate this before we start messing with decl's RTL. */
2905 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2907 /* If the original REG was a user-variable, then so is the REG whose
2908 address is being taken. Likewise for unchanging. */
2909 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2910 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2912 PUT_CODE (reg, MEM);
2913 MEM_VOLATILE_P (reg) = 0;
2914 MEM_ATTRS (reg) = 0;
2915 XEXP (reg, 0) = r;
2917 if (decl)
2919 tree type = TREE_TYPE (decl);
2920 enum machine_mode decl_mode
2921 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2922 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2923 : DECL_RTL_IF_SET (decl));
2925 PUT_MODE (reg, decl_mode);
2927 /* Clear DECL_RTL momentarily so functions below will work
2928 properly, then set it again. */
2929 if (DECL_P (decl) && decl_rtl == reg)
2930 SET_DECL_RTL (decl, 0);
2932 set_mem_attributes (reg, decl, 1);
2933 set_mem_alias_set (reg, set);
2935 if (DECL_P (decl) && decl_rtl == reg)
2936 SET_DECL_RTL (decl, reg);
2938 if (rescan
2939 && (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0)))
2940 fixup_var_refs (reg, GET_MODE (reg), TYPE_UNSIGNED (type), reg, 0);
2942 else if (rescan)
2944 /* This can only happen during reload. Clear the same flag bits as
2945 reload. */
2946 RTX_UNCHANGING_P (reg) = 0;
2947 MEM_IN_STRUCT_P (reg) = 0;
2948 MEM_SCALAR_P (reg) = 0;
2950 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2953 return reg;
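/* As an illustration (hypothetical register numbers; a 32-bit target where
   Pmode is SImode is assumed): for a local such as "int i; int *p = &i;"
   where i lives in pseudo (reg:SI 60), this routine rewrites that rtx in
   place into something like

       (mem:SI (addressof:SI (reg:SI 70) 60 <var_decl i>))

   where 70 is the freshly allocated pseudo.  purge_addressof later either
   removes the ADDRESSOF again (if the address is never really needed) or
   forces i into a stack slot.  */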
2956 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2958 void
2959 flush_addressof (tree decl)
2961 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2962 && DECL_RTL (decl) != 0
2963 && GET_CODE (DECL_RTL (decl)) == MEM
2964 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2965 && REG_P (XEXP (XEXP (DECL_RTL (decl), 0), 0)))
2966 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2969 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2971 static void
2972 put_addressof_into_stack (rtx r, htab_t ht)
2974 tree decl, type;
2975 bool volatile_p, used_p;
2977 rtx reg = XEXP (r, 0);
2979 if (!REG_P (reg))
2980 abort ();
2982 decl = ADDRESSOF_DECL (r);
2983 if (decl)
2985 type = TREE_TYPE (decl);
2986 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2987 && TREE_THIS_VOLATILE (decl));
2988 used_p = (TREE_USED (decl)
2989 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2991 else
2993 type = NULL_TREE;
2994 volatile_p = false;
2995 used_p = true;
2998 put_reg_into_stack (0, reg, type, GET_MODE (reg), ADDRESSOF_REGNO (r),
2999 volatile_p, used_p, false, ht);
3002 /* List of replacements made below in purge_addressof_1 when creating
3003 bitfield insertions. */
3004 static rtx purge_bitfield_addressof_replacements;
3006 /* List of replacements made below in purge_addressof_1 for patterns
3007 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3008 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
3009 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3010 sufficient in complex cases, e.g. when some field values can be
3011 extracted by using a MEM with a narrower mode. */
3012 static rtx purge_addressof_replacements;
3014 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3015 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3016 the stack. If the function returns FALSE then the replacement could not
3017 be made. If MAY_POSTPONE is true and we would not put the ADDRESSOF
3018 on the stack, postpone processing of the insn. */
3020 static bool
3021 purge_addressof_1 (rtx *loc, rtx insn, int force, int store, int may_postpone,
3022 htab_t ht)
3024 rtx x;
3025 RTX_CODE code;
3026 int i, j;
3027 const char *fmt;
3028 bool result = true;
3029 bool libcall = false;
3031 /* Re-start here to avoid recursion in common cases. */
3032 restart:
3034 x = *loc;
3035 if (x == 0)
3036 return true;
3038 /* Is this a libcall? */
3039 if (!insn)
3040 libcall = REG_NOTE_KIND (*loc) == REG_RETVAL;
3042 code = GET_CODE (x);
3044 /* If we don't return in any of the cases below, we will recurse inside
3045 the RTX, which will normally result in any ADDRESSOF being forced into
3046 memory. */
3047 if (code == SET)
3049 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1,
3050 may_postpone, ht);
3051 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0,
3052 may_postpone, ht);
3053 return result;
3055 else if (code == ADDRESSOF)
3057 rtx sub, insns;
3059 if (GET_CODE (XEXP (x, 0)) != MEM)
3060 put_addressof_into_stack (x, ht);
3062 /* We must create a copy of the rtx because it was created by
3063 overwriting a REG rtx which is always shared. */
3064 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3065 if (validate_change (insn, loc, sub, 0)
3066 || validate_replace_rtx (x, sub, insn))
3067 return true;
3069 start_sequence ();
3071 /* If SUB is a hard or virtual register, try it as a pseudo-register.
3072 Otherwise, perhaps SUB is an expression, so generate code to compute
3073 it. */
3074 if (REG_P (sub) && REGNO (sub) <= LAST_VIRTUAL_REGISTER)
3075 sub = copy_to_reg (sub);
3076 else
3077 sub = force_operand (sub, NULL_RTX);
3079 if (! validate_change (insn, loc, sub, 0)
3080 && ! validate_replace_rtx (x, sub, insn))
3081 abort ();
3083 insns = get_insns ();
3084 end_sequence ();
3085 emit_insn_before (insns, insn);
3086 return true;
3089 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3091 rtx sub = XEXP (XEXP (x, 0), 0);
3093 if (GET_CODE (sub) == MEM)
3094 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3095 else if (REG_P (sub)
3096 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3098 else if (REG_P (sub) && GET_MODE (x) != GET_MODE (sub))
3100 int size_x, size_sub;
3102 if (may_postpone)
3104 /* Postpone for now, so that we do not emit bit-field arithmetic
3105 unless there is some benefit from it. */
3106 if (!postponed_insns || XEXP (postponed_insns, 0) != insn)
3107 postponed_insns = alloc_INSN_LIST (insn, postponed_insns);
3108 return true;
3111 if (!insn)
3113 /* When processing REG_NOTES look at the list of
3114 replacements done on the insn to find the register that X
3115 was replaced by. */
3116 rtx tem;
3118 for (tem = purge_bitfield_addressof_replacements;
3119 tem != NULL_RTX;
3120 tem = XEXP (XEXP (tem, 1), 1))
3121 if (rtx_equal_p (x, XEXP (tem, 0)))
3123 *loc = XEXP (XEXP (tem, 1), 0);
3124 return true;
3127 /* See comment for purge_addressof_replacements. */
3128 for (tem = purge_addressof_replacements;
3129 tem != NULL_RTX;
3130 tem = XEXP (XEXP (tem, 1), 1))
3131 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3133 rtx z = XEXP (XEXP (tem, 1), 0);
3135 if (GET_MODE (x) == GET_MODE (z)
3136 || (!REG_P (XEXP (XEXP (tem, 1), 0))
3137 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3138 abort ();
3140 /* It can happen that the note may speak of things
3141 in a wider (or just different) mode than the
3142 code did. This is especially true of
3143 REG_RETVAL. */
3145 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3146 z = SUBREG_REG (z);
3148 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3149 && (GET_MODE_SIZE (GET_MODE (x))
3150 > GET_MODE_SIZE (GET_MODE (z))))
3152 /* This can occur as a result of invalid
3153 pointer casts, e.g. float f; ...
3154 *(long long int *)&f.
3155 ??? We could emit a warning here, but
3156 without a line number that wouldn't be
3157 very helpful. */
3158 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3160 else
3161 z = gen_lowpart (GET_MODE (x), z);
3163 *loc = z;
3164 return true;
3167 /* When we are processing the REG_NOTES of the last instruction
3168 of a libcall, there will typically be no replacements
3169 for that insn; the replacements happened before, in piecemeal
3170 fashion. OTOH we are not interested in the details of
3171 this for the REG_EQUAL note, we want to know the big picture,
3172 which can be succinctly described with a simple SUBREG.
3173 Note that removing the REG_EQUAL note is not an option
3174 on the last insn of a libcall, so we must do a replacement. */
3176 /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
3177 we got
3178 (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
3179 [0 S8 A32]), which can be expressed with a simple
3180 same-size subreg */
3181 if ((GET_MODE_SIZE (GET_MODE (x))
3182 <= GET_MODE_SIZE (GET_MODE (sub)))
3183 /* Again, invalid pointer casts (as in
3184 compile/990203-1.c) can require paradoxical
3185 subregs. */
3186 || (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3187 && (GET_MODE_SIZE (GET_MODE (x))
3188 > GET_MODE_SIZE (GET_MODE (sub)))
3189 && libcall))
3191 *loc = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
3192 return true;
3194 /* ??? Are there other cases we should handle? */
3196 /* Sometimes we may not be able to find the replacement. For
3197 example when the original insn was a MEM in a wider mode,
3198 and the note is part of a sign extension of a narrowed
3199 version of that MEM. Gcc testcase compile/990829-1.c can
3200 generate an example of this situation. Rather than complain
3201 we return false, which will prompt our caller to remove the
3202 offending note. */
3203 return false;
3206 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3207 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3209 /* Do not frob unchanging MEMs. If a later reference forces the
3210 pseudo to the stack, we can wind up with multiple writes to
3211 an unchanging memory, which is invalid. */
3212 if (RTX_UNCHANGING_P (x) && size_x != size_sub)
3215 /* Don't even consider working with paradoxical subregs,
3216 or the moral equivalent seen here. */
3217 else if (size_x <= size_sub
3218 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3220 /* Do a bitfield insertion to mirror what would happen
3221 in memory. */
3223 rtx val, seq;
3225 if (store)
3227 rtx p = PREV_INSN (insn);
3229 start_sequence ();
3230 val = gen_reg_rtx (GET_MODE (x));
3231 if (! validate_change (insn, loc, val, 0))
3233 /* Discard the current sequence and put the
3234 ADDRESSOF on stack. */
3235 end_sequence ();
3236 goto give_up;
3238 seq = get_insns ();
3239 end_sequence ();
3240 emit_insn_before (seq, insn);
3241 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3242 insn, ht);
3244 start_sequence ();
3245 store_bit_field (sub, size_x, 0, GET_MODE (x),
3246 val, GET_MODE_SIZE (GET_MODE (sub)));
3248 /* Make sure to unshare any shared rtl that store_bit_field
3249 might have created. */
3250 unshare_all_rtl_again (get_insns ());
3252 seq = get_insns ();
3253 end_sequence ();
3254 p = emit_insn_after (seq, insn);
3255 if (NEXT_INSN (insn))
3256 compute_insns_for_mem (NEXT_INSN (insn),
3257 p ? NEXT_INSN (p) : NULL_RTX,
3258 ht);
3260 else
3262 rtx p = PREV_INSN (insn);
3264 start_sequence ();
3265 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3266 GET_MODE (x), GET_MODE (x),
3267 GET_MODE_SIZE (GET_MODE (sub)));
3269 if (! validate_change (insn, loc, val, 0))
3271 /* Discard the current sequence and put the
3272 ADDRESSOF on stack. */
3273 end_sequence ();
3274 goto give_up;
3277 seq = get_insns ();
3278 end_sequence ();
3279 emit_insn_before (seq, insn);
3280 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3281 insn, ht);
3284 /* Remember the replacement so that the same one can be done
3285 on the REG_NOTES. */
3286 purge_bitfield_addressof_replacements
3287 = gen_rtx_EXPR_LIST (VOIDmode, x,
3288 gen_rtx_EXPR_LIST
3289 (VOIDmode, val,
3290 purge_bitfield_addressof_replacements));
3292 /* We replaced with a reg -- all done. */
3293 return true;
3297 else if (validate_change (insn, loc, sub, 0))
3299 /* Remember the replacement so that the same one can be done
3300 on the REG_NOTES. */
3301 if (REG_P (sub) || GET_CODE (sub) == SUBREG)
3303 rtx tem;
3305 for (tem = purge_addressof_replacements;
3306 tem != NULL_RTX;
3307 tem = XEXP (XEXP (tem, 1), 1))
3308 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3310 XEXP (XEXP (tem, 1), 0) = sub;
3311 return true;
3313 purge_addressof_replacements
3314 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3315 gen_rtx_EXPR_LIST (VOIDmode, sub,
3316 purge_addressof_replacements));
3317 return true;
3319 goto restart;
3323 give_up:
3324 /* Scan all subexpressions. */
3325 fmt = GET_RTX_FORMAT (code);
3326 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3328 if (*fmt == 'e')
3329 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0,
3330 may_postpone, ht);
3331 else if (*fmt == 'E')
3332 for (j = 0; j < XVECLEN (x, i); j++)
3333 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0,
3334 may_postpone, ht);
3337 return result;
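/* As an illustration (hypothetical register numbers): the two replacement
   lists above are what keep the REG_NOTES consistent.  If the pattern of an
   insn had (mem:QI (addressof:SI (reg:SI 70) 60 ...)) replaced by a
   bit-field extraction into a new QImode pseudo, an identical MEM appearing
   in a REG_EQUAL note of that insn is later replaced by the same pseudo,
   found by searching purge_bitfield_addressof_replacements.  */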
3340 /* Return a hash value for K, a REG. */
3342 static hashval_t
3343 insns_for_mem_hash (const void *k)
3345 /* Use the address of the key for the hash value. */
3346 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3347 return htab_hash_pointer (m->key);
3350 /* Return nonzero if K1 and K2 (two REGs) are the same. */
3352 static int
3353 insns_for_mem_comp (const void *k1, const void *k2)
3355 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3356 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3357 return m1->key == m2->key;
3360 struct insns_for_mem_walk_info
3362 /* The hash table that we are using to record which INSNs use which
3363 MEMs. */
3364 htab_t ht;
3366 /* The INSN we are currently processing. */
3367 rtx insn;
3369 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3370 to find the insns that use the REGs in the ADDRESSOFs. */
3371 int pass;
3374 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3375 that might be used in an ADDRESSOF expression, record this INSN in
3376 the hash table given by DATA (which is really a pointer to an
3377 insns_for_mem_walk_info structure). */
3379 static int
3380 insns_for_mem_walk (rtx *r, void *data)
3382 struct insns_for_mem_walk_info *ifmwi
3383 = (struct insns_for_mem_walk_info *) data;
3384 struct insns_for_mem_entry tmp;
3385 tmp.insns = NULL_RTX;
3387 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3388 && REG_P (XEXP (*r, 0)))
3390 void **e;
3391 tmp.key = XEXP (*r, 0);
3392 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3393 if (*e == NULL)
3395 *e = ggc_alloc (sizeof (tmp));
3396 memcpy (*e, &tmp, sizeof (tmp));
3399 else if (ifmwi->pass == 1 && *r && REG_P (*r))
3401 struct insns_for_mem_entry *ifme;
3402 tmp.key = *r;
3403 ifme = htab_find (ifmwi->ht, &tmp);
3405 /* If we have not already recorded this INSN, do so now. Since
3406 we process the INSNs in order, we know that if we have
3407 recorded it, it must be at the front of the list. */
3408 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3409 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3410 ifme->insns);
3413 return 0;
3416 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3417 which REGs in HT. */
3419 static void
3420 compute_insns_for_mem (rtx insns, rtx last_insn, htab_t ht)
3422 rtx insn;
3423 struct insns_for_mem_walk_info ifmwi;
3424 ifmwi.ht = ht;
3426 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3427 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3428 if (INSN_P (insn))
3430 ifmwi.insn = insn;
3431 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
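/* As an illustration: after the two passes above, the table maps each
   register appearing inside an ADDRESSOF to the list of insns that mention
   it, so that a later fixup pass over that register
   (fixup_var_refs_insns_with_hash) only has to visit those insns instead
   of rescanning the whole insn chain.  */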
3435 /* Helper function for purge_addressof called through for_each_rtx.
3436 Returns true iff the rtl is an ADDRESSOF. */
3438 static int
3439 is_addressof (rtx *rtl, void *data ATTRIBUTE_UNUSED)
3441 return GET_CODE (*rtl) == ADDRESSOF;
3444 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3445 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3446 stack. */
3448 void
3449 purge_addressof (rtx insns)
3451 rtx insn, tmp;
3452 htab_t ht;
3454 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3455 requires a fixup pass over the instruction stream to correct
3456 INSNs that depended on the REG being a REG, and not a MEM. But,
3457 these fixup passes are slow. Furthermore, most MEMs are not
3458 mentioned in very many instructions. So, we speed up the process
3459 by pre-calculating which REGs occur in which INSNs; that allows
3460 us to perform the fixup passes much more quickly. */
3461 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3462 compute_insns_for_mem (insns, NULL_RTX, ht);
3464 postponed_insns = NULL;
3466 for (insn = insns; insn; insn = NEXT_INSN (insn))
3467 if (INSN_P (insn))
3469 if (! purge_addressof_1 (&PATTERN (insn), insn,
3470 asm_noperands (PATTERN (insn)) > 0, 0, 1, ht))
3471 /* If we could not replace the ADDRESSOFs in the insn,
3472 something is wrong. */
3473 abort ();
3475 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, 0, ht))
3477 /* If we could not replace the ADDRESSOFs in the insn's notes,
3478 we can just remove the offending notes instead. */
3479 rtx note;
3481 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3483 /* If we find a REG_RETVAL note then the insn is a libcall.
3484 Such insns must have REG_EQUAL notes as well, in order
3485 for later passes of the compiler to work. So it is not
3486 safe to delete the notes here, and instead we abort. */
3487 if (REG_NOTE_KIND (note) == REG_RETVAL)
3488 abort ();
3489 if (for_each_rtx (&note, is_addressof, NULL))
3490 remove_note (insn, note);
3495 /* Process the postponed insns. */
3496 while (postponed_insns)
3498 insn = XEXP (postponed_insns, 0);
3499 tmp = postponed_insns;
3500 postponed_insns = XEXP (postponed_insns, 1);
3501 free_INSN_LIST_node (tmp);
3503 if (! purge_addressof_1 (&PATTERN (insn), insn,
3504 asm_noperands (PATTERN (insn)) > 0, 0, 0, ht))
3505 abort ();
3508 /* Clean up. */
3509 purge_bitfield_addressof_replacements = 0;
3510 purge_addressof_replacements = 0;
3512 /* REGs are shared. purge_addressof will destructively replace a REG
3513 with a MEM, which creates shared MEMs.
3515 Unfortunately, the children of put_reg_into_stack assume that MEMs
3516 referring to the same stack slot are shared (fixup_var_refs and
3517 the associated hash table code).
3519 So, we have to do another unsharing pass after we have flushed any
3520 REGs that had their address taken into the stack.
3522 It may be worth tracking whether or not we converted any REGs into
3523 MEMs to avoid this overhead when it is not needed. */
3524 unshare_all_rtl_again (get_insns ());
3527 /* Convert a SET of a hard subreg to a set of the appropriate hard
3528 register. A subroutine of purge_hard_subreg_sets. */
3530 static void
3531 purge_single_hard_subreg_set (rtx pattern)
3533 rtx reg = SET_DEST (pattern);
3534 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3535 int offset = 0;
3537 if (GET_CODE (reg) == SUBREG && REG_P (SUBREG_REG (reg))
3538 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3540 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3541 GET_MODE (SUBREG_REG (reg)),
3542 SUBREG_BYTE (reg),
3543 GET_MODE (reg));
3544 reg = SUBREG_REG (reg);
3548 if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3550 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3551 SET_DEST (pattern) = reg;
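/* As an illustration (hypothetical hard register numbers; a 32-bit
   little-endian target where DImode occupies hard registers 2 and 3 is
   assumed): a leftover set of one word of a hard register pair,

       (set (subreg:SI (reg:DI 2) 4) (reg:SI 60))

   is rewritten to set the hard register that actually holds that word,

       (set (reg:SI 3) (reg:SI 60)).  */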
3555 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3556 only such SETs that we expect to see are those left in because
3557 integrate can't handle sets of parts of a return value register.
3559 We don't use alter_subreg because we only want to eliminate subregs
3560 of hard registers. */
3562 void
3563 purge_hard_subreg_sets (rtx insn)
3565 for (; insn; insn = NEXT_INSN (insn))
3567 if (INSN_P (insn))
3569 rtx pattern = PATTERN (insn);
3570 switch (GET_CODE (pattern))
3572 case SET:
3573 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3574 purge_single_hard_subreg_set (pattern);
3575 break;
3576 case PARALLEL:
3578 int j;
3579 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3581 rtx inner_pattern = XVECEXP (pattern, 0, j);
3582 if (GET_CODE (inner_pattern) == SET
3583 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3584 purge_single_hard_subreg_set (inner_pattern);
3587 break;
3588 default:
3589 break;
3595 /* Pass through the INSNS of function FNDECL and convert virtual register
3596 references to hard register references. */
3598 void
3599 instantiate_virtual_regs (void)
3601 rtx insn;
3602 unsigned int i;
3604 /* Compute the offsets to use for this function. */
3605 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
3606 var_offset = STARTING_FRAME_OFFSET;
3607 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
3608 out_arg_offset = STACK_POINTER_OFFSET;
3609 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
3611 /* Scan all variables and parameters of this function. For each that is
3612 in memory, instantiate all virtual registers if the result is a valid
3613 address. If not, we do it later. That will handle most uses of virtual
3614 regs on many machines. */
3615 instantiate_decls (current_function_decl, 1);
3617 /* Initialize recognition, indicating that volatile is OK. */
3618 init_recog ();
3620 /* Scan through all the insns, instantiating every virtual register still
3621 present. */
3622 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3623 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3624 || GET_CODE (insn) == CALL_INSN)
3626 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3627 if (INSN_DELETED_P (insn))
3628 continue;
3629 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3630 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3631 if (GET_CODE (insn) == CALL_INSN)
3632 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3633 NULL_RTX, 0);
3635 /* Past this point all ASM statements should match. Verify that
3636 to avoid failures later in the compilation process. */
3637 if (asm_noperands (PATTERN (insn)) >= 0
3638 && ! check_asm_operands (PATTERN (insn)))
3639 instantiate_virtual_regs_lossage (insn);
3642 /* Instantiate the stack slots for the parm registers, for later use in
3643 addressof elimination. */
3644 for (i = 0; i < max_parm_reg; ++i)
3645 if (parm_reg_stack_loc[i])
3646 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3648 /* Now instantiate the remaining register equivalences for debugging info.
3649 These will not be valid addresses. */
3650 instantiate_decls (current_function_decl, 0);
3652 /* Indicate that, from now on, assign_stack_local should use
3653 frame_pointer_rtx. */
3654 virtuals_instantiated = 1;
3657 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3658 all virtual registers in their DECL_RTL's.
3660 If VALID_ONLY, do this only if the resulting address is still valid.
3661 Otherwise, always do it. */
3663 static void
3664 instantiate_decls (tree fndecl, int valid_only)
3666 tree decl;
3668 /* Process all parameters of the function. */
3669 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3671 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3672 HOST_WIDE_INT size_rtl;
3674 instantiate_decl (DECL_RTL (decl), size, valid_only);
3676 /* If the parameter was promoted, then the incoming RTL mode may be
3677 larger than the declared type size. We must use the larger of
3678 the two sizes. */
3679 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3680 size = MAX (size_rtl, size);
3681 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3684 /* Now process all variables defined in the function or its subblocks. */
3685 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3688 /* Subroutine of instantiate_decls: Process all decls in the given
3689 BLOCK node and all its subblocks. */
3691 static void
3692 instantiate_decls_1 (tree let, int valid_only)
3694 tree t;
3696 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3697 if (DECL_RTL_SET_P (t))
3698 instantiate_decl (DECL_RTL (t),
3699 int_size_in_bytes (TREE_TYPE (t)),
3700 valid_only);
3702 /* Process all subblocks. */
3703 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3704 instantiate_decls_1 (t, valid_only);
3707 /* Subroutine of the preceding procedures: Given RTL representing a
3708 decl and the size of the object, do any instantiation required.
3710 If VALID_ONLY is nonzero, it means that the RTL should only be
3711 changed if the new address is valid. */
3713 static void
3714 instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
3716 enum machine_mode mode;
3717 rtx addr;
3719 /* If this is not a MEM, no need to do anything. Similarly if the
3720 address is a constant or a register that is not a virtual register. */
3722 if (x == 0 || GET_CODE (x) != MEM)
3723 return;
3725 addr = XEXP (x, 0);
3726 if (CONSTANT_P (addr)
3727 || (GET_CODE (addr) == ADDRESSOF && REG_P (XEXP (addr, 0)))
3728 || (REG_P (addr)
3729 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3730 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3731 return;
3733 /* If we should only do this if the address is valid, copy the address.
3734 We need to do this so we can undo any changes that might make the
3735 address invalid. This copy is unfortunate, but probably can't be
3736 avoided. */
3738 if (valid_only)
3739 addr = copy_rtx (addr);
3741 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3743 if (valid_only && size >= 0)
3745 unsigned HOST_WIDE_INT decl_size = size;
3747 /* Now verify that the resulting address is valid for every integer or
3748 floating-point mode up to and including SIZE bytes long. We do this
3749 since the object might be accessed in any mode and frame addresses
3750 are shared. */
3752 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3753 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3754 mode = GET_MODE_WIDER_MODE (mode))
3755 if (! memory_address_p (mode, addr))
3756 return;
3758 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3759 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3760 mode = GET_MODE_WIDER_MODE (mode))
3761 if (! memory_address_p (mode, addr))
3762 return;
3765 /* Put back the address now that we have updated it and we either know
3766 it is valid or we don't care whether it is valid. */
3768 XEXP (x, 0) = addr;
3771 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3772 is a virtual register, return the equivalent hard register and set the
3773 offset indirectly through the pointer. Otherwise, return 0. */
3775 static rtx
3776 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
3778 rtx new;
3779 HOST_WIDE_INT offset;
3781 if (x == virtual_incoming_args_rtx)
3782 new = arg_pointer_rtx, offset = in_arg_offset;
3783 else if (x == virtual_stack_vars_rtx)
3784 new = frame_pointer_rtx, offset = var_offset;
3785 else if (x == virtual_stack_dynamic_rtx)
3786 new = stack_pointer_rtx, offset = dynamic_offset;
3787 else if (x == virtual_outgoing_args_rtx)
3788 new = stack_pointer_rtx, offset = out_arg_offset;
3789 else if (x == virtual_cfa_rtx)
3790 new = arg_pointer_rtx, offset = cfa_offset;
3791 else
3792 return 0;
3794 *poffset = offset;
3795 return new;
3799 /* Called when instantiate_virtual_regs has failed to update the instruction.
3800 Usually this means that a non-matching instruction has been emitted; for
3801 asm statements, however, the problem may lie in the constraints. */
3802 static void
3803 instantiate_virtual_regs_lossage (rtx insn)
3805 if (asm_noperands (PATTERN (insn)) >= 0)
3807 error_for_asm (insn, "impossible constraint in `asm'");
3808 delete_insn (insn);
3810 else
3811 abort ();
3813 /* Given a pointer to a piece of rtx and an optional pointer to the
3814 containing object, instantiate any virtual registers present in it.
3816 If EXTRA_INSNS, we always do the replacement and generate
3817 any extra insns before OBJECT. If it is zero, we do nothing if the
3818 replacement is not valid.
3820 Return 1 if we either had nothing to do or if we were able to do the
3821 needed replacement. Return 0 otherwise; we only return zero if
3822 EXTRA_INSNS is zero.
3824 We first try some simple transformations to avoid the creation of extra
3825 pseudos. */
3827 static int
3828 instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
3830 rtx x;
3831 RTX_CODE code;
3832 rtx new = 0;
3833 HOST_WIDE_INT offset = 0;
3834 rtx temp;
3835 rtx seq;
3836 int i, j;
3837 const char *fmt;
3839 /* Re-start here to avoid recursion in common cases. */
3840 restart:
3842 x = *loc;
3843 if (x == 0)
3844 return 1;
3846 /* We may have detected and deleted invalid asm statements. */
3847 if (object && INSN_P (object) && INSN_DELETED_P (object))
3848 return 1;
3850 code = GET_CODE (x);
3852 /* Check for some special cases. */
3853 switch (code)
3855 case CONST_INT:
3856 case CONST_DOUBLE:
3857 case CONST_VECTOR:
3858 case CONST:
3859 case SYMBOL_REF:
3860 case CODE_LABEL:
3861 case PC:
3862 case CC0:
3863 case ASM_INPUT:
3864 case ADDR_VEC:
3865 case ADDR_DIFF_VEC:
3866 case RETURN:
3867 return 1;
3869 case SET:
3870 /* We are allowed to set the virtual registers. This means that
3871 the actual register should receive the source minus the
3872 appropriate offset. This is used, for example, in the handling
3873 of non-local gotos. */
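/* Illustrative sketch (not from the original source): for

       (set (reg virtual-stack-dynamic) (reg 100))

   the code below emits insns computing (reg 100) minus dynamic_offset
   and makes the stack pointer the destination, since a use of
   virtual-stack-dynamic stands for sp + dynamic_offset.  */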
3874 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3876 rtx src = SET_SRC (x);
3878 /* We are setting the register, not using it, so the relevant
3879 offset is the negative of the offset to use were we using
3880 the register. */
3881 offset = - offset;
3882 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3884 /* The only valid sources here are PLUS or REG. Just do
3885 the simplest possible thing to handle them. */
3886 if (!REG_P (src) && GET_CODE (src) != PLUS)
3888 instantiate_virtual_regs_lossage (object);
3889 return 1;
3892 start_sequence ();
3893 if (!REG_P (src))
3894 temp = force_operand (src, NULL_RTX);
3895 else
3896 temp = src;
3897 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3898 seq = get_insns ();
3899 end_sequence ();
3901 emit_insn_before (seq, object);
3902 SET_DEST (x) = new;
3904 if (! validate_change (object, &SET_SRC (x), temp, 0)
3905 || ! extra_insns)
3906 instantiate_virtual_regs_lossage (object);
3908 return 1;
3911 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3912 loc = &SET_SRC (x);
3913 goto restart;
3915 case PLUS:
3916 /* Handle special case of virtual register plus constant. */
3917 if (CONSTANT_P (XEXP (x, 1)))
3919 rtx old, new_offset;
3921 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3922 if (GET_CODE (XEXP (x, 0)) == PLUS)
3924 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3926 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3927 extra_insns);
3928 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3930 else
3932 loc = &XEXP (x, 0);
3933 goto restart;
3937 #ifdef POINTERS_EXTEND_UNSIGNED
3938 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3939 we can commute the PLUS and SUBREG because pointers into the
3940 frame are well-behaved. */
3941 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3942 && GET_CODE (XEXP (x, 1)) == CONST_INT
3943 && 0 != (new
3944 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3945 &offset))
3946 && validate_change (object, loc,
3947 plus_constant (gen_lowpart (ptr_mode,
3948 new),
3949 offset
3950 + INTVAL (XEXP (x, 1))),
3952 return 1;
3953 #endif
3954 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3956 /* We know the second operand is a constant. Unless the
3957 first operand is a REG (which has already been checked),
3958 it needs to be checked. */
3959 if (!REG_P (XEXP (x, 0)))
3961 loc = &XEXP (x, 0);
3962 goto restart;
3964 return 1;
3967 new_offset = plus_constant (XEXP (x, 1), offset);
3969 /* If the new constant is zero, try to replace the sum with just
3970 the register. */
3971 if (new_offset == const0_rtx
3972 && validate_change (object, loc, new, 0))
3973 return 1;
3975 /* Next try to replace the register and new offset.
3976 There are two changes to validate here and we can't assume that
3977 in the case of old offset equals new just changing the register
3978 will yield a valid insn. In the interests of a little efficiency,
3979 however, we only call validate change once (we don't queue up the
3980 changes and then call apply_change_group). */
3982 old = XEXP (x, 0);
3983 if (offset == 0
3984 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3985 : (XEXP (x, 0) = new,
3986 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3988 if (! extra_insns)
3990 XEXP (x, 0) = old;
3991 return 0;
3994 /* Otherwise copy the new constant into a register and replace
3995 the constant with that register. */
3996 temp = gen_reg_rtx (Pmode);
3997 XEXP (x, 0) = new;
3998 if (validate_change (object, &XEXP (x, 1), temp, 0))
3999 emit_insn_before (gen_move_insn (temp, new_offset), object);
4000 else
4002 /* If that didn't work, replace this expression with a
4003 register containing the sum. */
4005 XEXP (x, 0) = old;
4006 new = gen_rtx_PLUS (Pmode, new, new_offset);
4008 start_sequence ();
4009 temp = force_operand (new, NULL_RTX);
4010 seq = get_insns ();
4011 end_sequence ();
4013 emit_insn_before (seq, object);
4014 if (! validate_change (object, loc, temp, 0)
4015 && ! validate_replace_rtx (x, temp, object))
4017 instantiate_virtual_regs_lossage (object);
4018 return 1;
4023 return 1;
4026 /* Fall through to generic two-operand expression case. */
4027 case EXPR_LIST:
4028 case CALL:
4029 case COMPARE:
4030 case MINUS:
4031 case MULT:
4032 case DIV: case UDIV:
4033 case MOD: case UMOD:
4034 case AND: case IOR: case XOR:
4035 case ROTATERT: case ROTATE:
4036 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
4037 case NE: case EQ:
4038 case GE: case GT: case GEU: case GTU:
4039 case LE: case LT: case LEU: case LTU:
4040 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
4041 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
4042 loc = &XEXP (x, 0);
4043 goto restart;
4045 case MEM:
4046 /* Most cases of MEM that convert to valid addresses have already been
4047 handled by our scan of decls. The only special handling we
4048 need here is to make a copy of the rtx to ensure it isn't being
4049 shared if we have to change it to a pseudo.
4051 If the rtx is a simple reference to an address via a virtual register,
4052 it can potentially be shared. In such cases, first try to make it
4053 a valid address, which can also be shared. Otherwise, copy it and
4054 proceed normally.
4056 First check for common cases that need no processing. These are
4057 usually due to instantiation already being done on a previous instance
4058 of a shared rtx. */
4060 temp = XEXP (x, 0);
4061 if (CONSTANT_ADDRESS_P (temp)
4062 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4063 || temp == arg_pointer_rtx
4064 #endif
4065 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4066 || temp == hard_frame_pointer_rtx
4067 #endif
4068 || temp == frame_pointer_rtx)
4069 return 1;
4071 if (GET_CODE (temp) == PLUS
4072 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4073 && (XEXP (temp, 0) == frame_pointer_rtx
4074 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4075 || XEXP (temp, 0) == hard_frame_pointer_rtx
4076 #endif
4077 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4078 || XEXP (temp, 0) == arg_pointer_rtx
4079 #endif
4081 return 1;
4083 if (temp == virtual_stack_vars_rtx
4084 || temp == virtual_incoming_args_rtx
4085 || (GET_CODE (temp) == PLUS
4086 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4087 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4088 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4090 /* This MEM may be shared. If the substitution can be done without
4091 the need to generate new pseudos, we want to do it in place
4092 so all copies of the shared rtx benefit. The call below will
4093 only make substitutions if the resulting address is still
4094 valid.
4096 Note that we cannot pass X as the object in the recursive call
4097 since the insn being processed may not allow all valid
4098 addresses. However, if we were not passed an object, we can
4099 only modify X without copying it if X will have a valid
4100 address.
4102 ??? Also note that this can still lose if OBJECT is an insn that
4103 has fewer restrictions on an address than some other insn.
4104 In that case, we will modify the shared address. This case
4105 doesn't seem very likely, though. One case where this could
4106 happen is in the case of a USE or CLOBBER reference, but we
4107 take care of that below. */
4109 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4110 object ? object : x, 0))
4111 return 1;
4113 /* Otherwise make a copy and process that copy. We copy the entire
4114 RTL expression since it might be a PLUS which could also be
4115 shared. */
4116 *loc = x = copy_rtx (x);
4119 /* Fall through to generic unary operation case. */
4120 case PREFETCH:
4121 case SUBREG:
4122 case STRICT_LOW_PART:
4123 case NEG: case NOT:
4124 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4125 case SIGN_EXTEND: case ZERO_EXTEND:
4126 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4127 case FLOAT: case FIX:
4128 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4129 case ABS:
4130 case SQRT:
4131 case FFS:
4132 case CLZ: case CTZ:
4133 case POPCOUNT: case PARITY:
4134 /* These cases either have just one operand or we know that we need not
4135 check the rest of the operands. */
4136 loc = &XEXP (x, 0);
4137 goto restart;
4139 case USE:
4140 case CLOBBER:
4141 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4142 go ahead and make the invalid one, but do it to a copy. For a REG,
4143 just make the recursive call, since there's no chance of a problem. */
4145 if ((GET_CODE (XEXP (x, 0)) == MEM
4146 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4148 || (REG_P (XEXP (x, 0))
4149 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4150 return 1;
4152 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4153 loc = &XEXP (x, 0);
4154 goto restart;
4156 case REG:
4157 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4158 in front of this insn and substitute the temporary. */
4159 if ((new = instantiate_new_reg (x, &offset)) != 0)
4161 temp = plus_constant (new, offset);
4162 if (!validate_change (object, loc, temp, 0))
4164 if (! extra_insns)
4165 return 0;
4167 start_sequence ();
4168 temp = force_operand (temp, NULL_RTX);
4169 seq = get_insns ();
4170 end_sequence ();
4172 emit_insn_before (seq, object);
4173 if (! validate_change (object, loc, temp, 0)
4174 && ! validate_replace_rtx (x, temp, object))
4175 instantiate_virtual_regs_lossage (object);
4179 return 1;
4181 case ADDRESSOF:
4182 if (REG_P (XEXP (x, 0)))
4183 return 1;
4185 else if (GET_CODE (XEXP (x, 0)) == MEM)
4187 /* If we have a (addressof (mem ..)), do any instantiation inside
4188 since we know we'll be making the inside valid when we finally
4189 remove the ADDRESSOF. */
4190 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4191 return 1;
4193 break;
4195 default:
4196 break;
4199 /* Scan all subexpressions. */
4200 fmt = GET_RTX_FORMAT (code);
4201 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4202 if (*fmt == 'e')
4204 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4205 return 0;
4207 else if (*fmt == 'E')
4208 for (j = 0; j < XVECLEN (x, i); j++)
4209 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4210 extra_insns))
4211 return 0;
4213 return 1;
4216 /* Return the first insn following those generated by `assign_parms'. */
4219 get_first_nonparm_insn (void)
4221 if (last_parm_insn)
4222 return NEXT_INSN (last_parm_insn);
4223 return get_insns ();
4226 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4227 This means a type for which function calls must pass an address to the
4228 function or get an address back from the function.
4229 EXP may be a type node or an expression (whose type is tested). */
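/* Illustrative example (not from the original source; the exact cutoff
   is decided by targetm.calls.return_in_memory and the checks below):
   on a typical target a function returning struct { int a[8]; } is
   treated as returning an aggregate value, so the caller passes an
   address for the result, while one returning a plain int is not.  */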
4232 aggregate_value_p (tree exp, tree fntype)
4234 int i, regno, nregs;
4235 rtx reg;
4237 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4239 if (fntype)
4240 switch (TREE_CODE (fntype))
4242 case CALL_EXPR:
4243 fntype = get_callee_fndecl (fntype);
4244 fntype = fntype ? TREE_TYPE (fntype) : 0;
4245 break;
4246 case FUNCTION_DECL:
4247 fntype = TREE_TYPE (fntype);
4248 break;
4249 case FUNCTION_TYPE:
4250 case METHOD_TYPE:
4251 break;
4252 case IDENTIFIER_NODE:
4253 fntype = 0;
4254 break;
4255 default:
4256 /* We don't expect other tree codes here. */
4257 abort();
4260 if (TREE_CODE (type) == VOID_TYPE)
4261 return 0;
4262 if (targetm.calls.return_in_memory (type, fntype))
4263 return 1;
4264 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4265 and thus can't be returned in registers. */
4266 if (TREE_ADDRESSABLE (type))
4267 return 1;
4268 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4269 return 1;
4270 /* Make sure we have suitable call-clobbered regs to return
4271 the value in; if not, we must return it in memory. */
4272 reg = hard_function_value (type, 0, 0);
4274 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4275 it is OK. */
4276 if (!REG_P (reg))
4277 return 0;
4279 regno = REGNO (reg);
4280 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
4281 for (i = 0; i < nregs; i++)
4282 if (! call_used_regs[regno + i])
4283 return 1;
4284 return 0;
4287 /* Assign RTL expressions to the function's parameters.
4288 This may involve copying them into registers and using
4289 those registers as the RTL for them. */
4291 void
4292 assign_parms (tree fndecl)
4294 tree parm;
4295 CUMULATIVE_ARGS args_so_far;
4296 /* Total space needed so far for args on the stack,
4297 given as a constant and a tree-expression. */
4298 struct args_size stack_args_size;
4299 HOST_WIDE_INT extra_pretend_bytes = 0;
4300 tree fntype = TREE_TYPE (fndecl);
4301 tree fnargs = DECL_ARGUMENTS (fndecl), orig_fnargs;
4302 /* This is used for the arg pointer when referring to stack args. */
4303 rtx internal_arg_pointer;
4304 /* This is a dummy PARM_DECL that we use for the function result if
4305 the function returns a structure. */
4306 tree function_result_decl = 0;
4307 int varargs_setup = 0;
4308 int reg_parm_stack_space ATTRIBUTE_UNUSED = 0;
4309 rtx conversion_insns = 0;
4311 /* Nonzero if function takes extra anonymous args.
4312 This means the last named arg must be on the stack
4313 right before the anonymous ones. */
4314 int stdarg = current_function_stdarg;
4316 /* If the reg that the virtual arg pointer will be translated into is
4317 not a fixed reg or is the stack pointer, make a copy of the virtual
4318 arg pointer, and address parms via the copy. The frame pointer is
4319 considered fixed even though it is not marked as such.
4321 The second time through, simply use ap to avoid generating rtx. */
4323 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4324 || ! (fixed_regs[ARG_POINTER_REGNUM]
4325 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4326 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4327 else
4328 internal_arg_pointer = virtual_incoming_args_rtx;
4329 current_function_internal_arg_pointer = internal_arg_pointer;
4331 stack_args_size.constant = 0;
4332 stack_args_size.var = 0;
4334 /* If struct value address is treated as the first argument, make it so. */
4335 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
4336 && ! current_function_returns_pcc_struct
4337 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
4339 tree type = build_pointer_type (TREE_TYPE (fntype));
4341 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4343 DECL_ARG_TYPE (function_result_decl) = type;
4344 TREE_CHAIN (function_result_decl) = fnargs;
4345 fnargs = function_result_decl;
4348 orig_fnargs = fnargs;
4350 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4351 parm_reg_stack_loc = ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4353 /* If the target wants to split complex arguments into scalars, do so. */
4354 if (targetm.calls.split_complex_arg)
4355 fnargs = split_complex_args (fnargs);
4357 #ifdef REG_PARM_STACK_SPACE
4358 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4359 #endif
4361 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4362 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4363 #else
4364 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl, -1);
4365 #endif
4367 /* We haven't yet found an argument that we must push and pretend the
4368 caller did. */
4369 current_function_pretend_args_size = 0;
4371 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4373 rtx entry_parm;
4374 rtx stack_parm;
4375 enum machine_mode promoted_mode, passed_mode;
4376 enum machine_mode nominal_mode, promoted_nominal_mode;
4377 int unsignedp;
4378 struct locate_and_pad_arg_data locate;
4379 int passed_pointer = 0;
4380 int did_conversion = 0;
4381 tree passed_type = DECL_ARG_TYPE (parm);
4382 tree nominal_type = TREE_TYPE (parm);
4383 int last_named = 0, named_arg;
4384 int in_regs;
4385 int partial = 0;
4386 int pretend_bytes = 0;
4387 int loaded_in_reg = 0;
4389 /* Set LAST_NAMED if this is last named arg before last
4390 anonymous args. */
4391 if (stdarg)
4393 tree tem;
4395 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4396 if (DECL_NAME (tem))
4397 break;
4399 if (tem == 0)
4400 last_named = 1;
4402 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4403 most machines, if this is a varargs/stdarg function, then we treat
4404 the last named arg as if it were anonymous too. */
4405 named_arg = (targetm.calls.strict_argument_naming (&args_so_far)
4406 ? 1 : !last_named);
4408 if (TREE_TYPE (parm) == error_mark_node
4409 /* This can happen after weird syntax errors
4410 or if an enum type is defined among the parms. */
4411 || TREE_CODE (parm) != PARM_DECL
4412 || passed_type == NULL)
4414 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4415 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4416 TREE_USED (parm) = 1;
4417 continue;
4420 /* Find mode of arg as it is passed, and mode of arg
4421 as it should be during execution of this function. */
4422 passed_mode = TYPE_MODE (passed_type);
4423 nominal_mode = TYPE_MODE (nominal_type);
4425 /* If the parm's mode is VOID, its value doesn't matter,
4426 and avoid the usual things like emit_move_insn that could crash. */
4427 if (nominal_mode == VOIDmode)
4429 SET_DECL_RTL (parm, const0_rtx);
4430 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4431 continue;
4434 /* If the parm is to be passed as a transparent union, use the
4435 type of the first field for the tests below. We have already
4436 verified that the modes are the same. */
4437 if (DECL_TRANSPARENT_UNION (parm)
4438 || (TREE_CODE (passed_type) == UNION_TYPE
4439 && TYPE_TRANSPARENT_UNION (passed_type)))
4440 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4442 /* See if this arg was passed by invisible reference. It is if
4443 it is an object whose size depends on the contents of the
4444 object itself or if the machine requires these objects be passed
4445 that way. */
4447 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (passed_type))
4448 || TREE_ADDRESSABLE (passed_type)
4449 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4450 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4451 passed_type, named_arg)
4452 #endif
4455 passed_type = nominal_type = build_pointer_type (passed_type);
4456 passed_pointer = 1;
4457 passed_mode = nominal_mode = Pmode;
4459 /* See if the frontend wants to pass this by invisible reference. */
4460 else if (passed_type != nominal_type
4461 && POINTER_TYPE_P (passed_type)
4462 && TREE_TYPE (passed_type) == nominal_type)
4464 nominal_type = passed_type;
4465 passed_pointer = 1;
4466 passed_mode = nominal_mode = Pmode;
4469 promoted_mode = passed_mode;
4471 if (targetm.calls.promote_function_args (TREE_TYPE (fndecl)))
4473 /* Compute the mode in which the arg is actually extended to. */
4474 unsignedp = TYPE_UNSIGNED (passed_type);
4475 promoted_mode = promote_mode (passed_type, promoted_mode,
4476 &unsignedp, 1);
4479 /* Let machine desc say which reg (if any) the parm arrives in.
4480 0 means it arrives on the stack. */
4481 #ifdef FUNCTION_INCOMING_ARG
4482 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4483 passed_type, named_arg);
4484 #else
4485 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4486 passed_type, named_arg);
4487 #endif
4489 if (entry_parm == 0)
4490 promoted_mode = passed_mode;
4492 /* If this is the last named parameter, do any required setup for
4493 varargs or stdargs. We need to know about the case of this being an
4494 addressable type, in which case we skip the registers it
4495 would have arrived in.
4497 For stdargs, LAST_NAMED will be set for two parameters, the one that
4498 is actually the last named, and the dummy parameter. We only
4499 want to do this action once.
4501 Also, indicate when RTL generation is to be suppressed. */
4502 if (last_named && !varargs_setup)
4504 int varargs_pretend_bytes = 0;
4505 targetm.calls.setup_incoming_varargs (&args_so_far, promoted_mode,
4506 passed_type,
4507 &varargs_pretend_bytes, 0);
4508 varargs_setup = 1;
4510 /* If the back-end has requested extra stack space, record how
4511 much is needed. Do not change pretend_args_size otherwise
4512 since it may be nonzero from an earlier partial argument. */
4513 if (varargs_pretend_bytes > 0)
4514 current_function_pretend_args_size = varargs_pretend_bytes;
4517 /* Determine parm's home in the stack,
4518 in case it arrives in the stack or we should pretend it did.
4520 Compute the stack position and rtx where the argument arrives
4521 and its size.
4523 There is one complexity here: If this was a parameter that would
4524 have been passed in registers, but wasn't only because it is
4525 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4526 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4527 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4528 0 as it was the previous time. */
4529 in_regs = entry_parm != 0;
4530 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4531 in_regs = 1;
4532 #endif
4533 if (!in_regs && !named_arg)
4535 int pretend_named =
4536 targetm.calls.pretend_outgoing_varargs_named (&args_so_far);
4537 if (pretend_named)
4539 #ifdef FUNCTION_INCOMING_ARG
4540 in_regs = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4541 passed_type,
4542 pretend_named) != 0;
4543 #else
4544 in_regs = FUNCTION_ARG (args_so_far, promoted_mode,
4545 passed_type,
4546 pretend_named) != 0;
4547 #endif
4551 /* If this parameter was passed both in registers and in the stack,
4552 use the copy on the stack. */
4553 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4554 entry_parm = 0;
4556 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4557 if (entry_parm)
4559 partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4560 passed_type, named_arg);
4561 if (partial
4562 /* The caller might already have allocated stack space
4563 for the register parameters. */
4564 && reg_parm_stack_space == 0)
4566 /* Part of this argument is passed in registers and part
4567 is passed on the stack. Ask the prologue code to extend
4568 the stack part so that we can recreate the full value.
4570 PRETEND_BYTES is the size of the registers we need to store.
4571 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
4572 stack space that the prologue should allocate.
4574 Internally, gcc assumes that the argument pointer is
4575 aligned to STACK_BOUNDARY bits. This is used both for
4576 alignment optimizations (see init_emit) and to locate
4577 arguments that are aligned to more than PARM_BOUNDARY
4578 bits. We must preserve this invariant by rounding
4579 CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to a stack
4580 boundary. */
4582 /* We assume at most one partial arg, and it must be the first
4583 argument on the stack. */
4584 if (extra_pretend_bytes || current_function_pretend_args_size)
4585 abort ();
4587 pretend_bytes = partial * UNITS_PER_WORD;
4588 current_function_pretend_args_size
4589 = CEIL_ROUND (pretend_bytes, STACK_BYTES);
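/* Worked example (illustrative only; all values are target-dependent):
   partial == 3 and UNITS_PER_WORD == 4 give pretend_bytes == 12;
   with STACK_BYTES == 16, CEIL_ROUND rounds that up so that
   current_function_pretend_args_size == 16.  */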
4591 /* We want to align relative to the actual stack pointer, so
4592 don't include this in the stack size until later. */
4593 extra_pretend_bytes = current_function_pretend_args_size;
4596 #endif
4598 memset (&locate, 0, sizeof (locate));
4599 locate_and_pad_parm (promoted_mode, passed_type, in_regs,
4600 entry_parm ? partial : 0, fndecl,
4601 &stack_args_size, &locate);
4602 /* Adjust offsets to include the pretend args. */
4603 locate.slot_offset.constant += extra_pretend_bytes - pretend_bytes;
4604 locate.offset.constant += extra_pretend_bytes - pretend_bytes;
4607 rtx offset_rtx;
4609 /* If we're passing this arg using a reg, make its stack home
4610 the aligned stack slot. */
4611 if (entry_parm)
4612 offset_rtx = ARGS_SIZE_RTX (locate.slot_offset);
4613 else
4614 offset_rtx = ARGS_SIZE_RTX (locate.offset);
4616 if (offset_rtx == const0_rtx)
4617 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4618 else
4619 stack_parm = gen_rtx_MEM (promoted_mode,
4620 gen_rtx_PLUS (Pmode,
4621 internal_arg_pointer,
4622 offset_rtx));
4624 set_mem_attributes (stack_parm, parm, 1);
4625 set_mem_align (stack_parm,
4626 FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type));
4627 if (entry_parm)
4628 set_reg_attrs_for_parm (entry_parm, stack_parm);
4631 /* If this parm was passed part in regs and part in memory,
4632 pretend it arrived entirely in memory
4633 by pushing the register-part onto the stack.
4635 In the special case of a DImode or DFmode that is split,
4636 we could put it together in a pseudoreg directly,
4637 but for now that's not worth bothering with. */
4639 if (partial)
4641 /* Handle calls that pass values in multiple non-contiguous
4642 locations. The Irix 6 ABI has examples of this. */
4643 if (GET_CODE (entry_parm) == PARALLEL)
4644 emit_group_store (validize_mem (stack_parm), entry_parm,
4645 TREE_TYPE (parm),
4646 int_size_in_bytes (TREE_TYPE (parm)));
4648 else
4649 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
4650 partial);
4652 entry_parm = stack_parm;
4655 /* If we didn't decide this parm came in a register,
4656 by default it came on the stack. */
4657 if (entry_parm == 0)
4658 entry_parm = stack_parm;
4660 /* Record permanently how this parm was passed. */
4661 set_decl_incoming_rtl (parm, entry_parm);
4663 /* If there is actually space on the stack for this parm,
4664 count it in stack_args_size; otherwise set stack_parm to 0
4665 to indicate there is no preallocated stack slot for the parm. */
4667 if (entry_parm == stack_parm
4668 || (GET_CODE (entry_parm) == PARALLEL
4669 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4670 #if defined (REG_PARM_STACK_SPACE)
4671 /* On some machines, even if a parm value arrives in a register
4672 there is still an (uninitialized) stack slot allocated
4673 for it. */
4674 || REG_PARM_STACK_SPACE (fndecl) > 0
4675 #endif
4678 stack_args_size.constant += locate.size.constant;
4679 if (locate.size.var)
4680 ADD_PARM_SIZE (stack_args_size, locate.size.var);
4682 else
4683 /* No stack slot was pushed for this parm. */
4684 stack_parm = 0;
4686 /* Update info on where next arg arrives in registers. */
4688 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4689 passed_type, named_arg);
4691 /* If we can't trust the parm stack slot to be aligned enough
4692 for its ultimate type, don't use that slot after entry.
4693 We'll make another stack slot, if we need one. */
4694 if (STRICT_ALIGNMENT && stack_parm
4695 && GET_MODE_ALIGNMENT (nominal_mode) > MEM_ALIGN (stack_parm))
4696 stack_parm = 0;
4698 /* If parm was passed in memory, and we need to convert it on entry,
4699 don't store it back in that same slot. */
4700 if (entry_parm == stack_parm
4701 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4702 stack_parm = 0;
4704 /* When an argument is passed in multiple locations, we can't
4705 make use of this information, but we can save some copying if
4706 the whole argument is passed in a single register. */
4707 if (GET_CODE (entry_parm) == PARALLEL
4708 && nominal_mode != BLKmode && passed_mode != BLKmode)
4710 int i, len = XVECLEN (entry_parm, 0);
4712 for (i = 0; i < len; i++)
4713 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4714 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
4715 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4716 == passed_mode)
4717 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4719 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4720 set_decl_incoming_rtl (parm, entry_parm);
4721 break;
4725 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4726 in the mode in which it arrives.
4727 STACK_PARM is an RTX for a stack slot where the parameter can live
4728 during the function (in case we want to put it there).
4729 STACK_PARM is 0 if no stack slot was pushed for it.
4731 Now output code if necessary to convert ENTRY_PARM to
4732 the type in which this function declares it,
4733 and store that result in an appropriate place,
4734 which may be a pseudo reg, may be STACK_PARM,
4735 or may be a local stack slot if STACK_PARM is 0.
4737 Set DECL_RTL to that place. */
4739 if (GET_CODE (entry_parm) == PARALLEL && nominal_mode != BLKmode
4740 && XVECLEN (entry_parm, 0) > 1)
4742 /* Reconstitute objects the size of a register or larger using
4743 register operations instead of the stack. */
4744 rtx parmreg = gen_reg_rtx (nominal_mode);
4746 if (REG_P (parmreg))
4748 unsigned int regno = REGNO (parmreg);
4750 emit_group_store (parmreg, entry_parm, TREE_TYPE (parm),
4751 int_size_in_bytes (TREE_TYPE (parm)));
4752 SET_DECL_RTL (parm, parmreg);
4753 loaded_in_reg = 1;
4755 if (regno >= max_parm_reg)
4757 rtx *new;
4758 int old_max_parm_reg = max_parm_reg;
4760 /* It's slow to expand this one register at a time,
4761 but it's also rare and we need max_parm_reg to be
4762 precisely correct. */
4763 max_parm_reg = regno + 1;
4764 new = ggc_realloc (parm_reg_stack_loc,
4765 max_parm_reg * sizeof (rtx));
4766 memset (new + old_max_parm_reg, 0,
4767 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4768 parm_reg_stack_loc = new;
4769 parm_reg_stack_loc[regno] = stack_parm;
4774 if (nominal_mode == BLKmode
4775 #ifdef BLOCK_REG_PADDING
4776 || (locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
4777 && GET_MODE_SIZE (promoted_mode) < UNITS_PER_WORD)
4778 #endif
4779 || GET_CODE (entry_parm) == PARALLEL)
4781 /* If a BLKmode arrives in registers, copy it to a stack slot.
4782 Handle calls that pass values in multiple non-contiguous
4783 locations. The Irix 6 ABI has examples of this. */
4784 if (REG_P (entry_parm)
4785 || (GET_CODE (entry_parm) == PARALLEL
4786 && (!loaded_in_reg || !optimize)))
4788 int size = int_size_in_bytes (TREE_TYPE (parm));
4789 int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
4790 rtx mem;
4792 /* Note that we will be storing an integral number of words.
4793 So we have to be careful to ensure that we allocate an
4794 integral number of words. We do this below in the
4795 assign_stack_local if space was not allocated in the argument
4796 list. If it was, this will not work if PARM_BOUNDARY is not
4797 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4798 if it becomes a problem. The exception is when BLKmode arrives
4799 with arguments not conforming to word_mode. */
4801 if (stack_parm == 0)
4803 stack_parm = assign_stack_local (BLKmode, size_stored, 0);
4804 PUT_MODE (stack_parm, GET_MODE (entry_parm));
4805 set_mem_attributes (stack_parm, parm, 1);
4807 else if (GET_CODE (entry_parm) == PARALLEL
4808 && GET_MODE(entry_parm) == BLKmode)
4810 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4811 abort ();
4813 mem = validize_mem (stack_parm);
4815 /* Handle calls that pass values in multiple non-contiguous
4816 locations. The Irix 6 ABI has examples of this. */
4817 if (GET_CODE (entry_parm) == PARALLEL)
4818 emit_group_store (mem, entry_parm, TREE_TYPE (parm), size);
4820 else if (size == 0)
4823 /* If SIZE is that of a mode no bigger than a word, just use
4824 that mode's store operation. */
4825 else if (size <= UNITS_PER_WORD)
4827 enum machine_mode mode
4828 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
4830 if (mode != BLKmode
4831 #ifdef BLOCK_REG_PADDING
4832 && (size == UNITS_PER_WORD
4833 || (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
4834 != (BYTES_BIG_ENDIAN ? upward : downward)))
4835 #endif
4838 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
4839 emit_move_insn (change_address (mem, mode, 0), reg);
4842 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
4843 machine must be aligned to the left before storing
4844 to memory. Note that the previous test doesn't
4845 handle all cases (e.g. SIZE == 3). */
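/* Worked example (illustrative only): with size == 3 and
   UNITS_PER_WORD == 4, BY below is (4 - 3) * 8 == 8, so the value
   is shifted left by one byte before the word_mode store, leaving
   the three significant bytes at the most-significant end of the
   word, as a big-endian store requires.  */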
4846 else if (size != UNITS_PER_WORD
4847 #ifdef BLOCK_REG_PADDING
4848 && (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
4849 == downward)
4850 #else
4851 && BYTES_BIG_ENDIAN
4852 #endif
4855 rtx tem, x;
4856 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4857 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
4859 x = expand_binop (word_mode, ashl_optab, reg,
4860 GEN_INT (by), 0, 1, OPTAB_WIDEN);
4861 tem = change_address (mem, word_mode, 0);
4862 emit_move_insn (tem, x);
4864 else
4865 move_block_from_reg (REGNO (entry_parm), mem,
4866 size_stored / UNITS_PER_WORD);
4868 else
4869 move_block_from_reg (REGNO (entry_parm), mem,
4870 size_stored / UNITS_PER_WORD);
4872 /* If parm is already bound to register pair, don't change
4873 this binding. */
4874 if (! DECL_RTL_SET_P (parm))
4875 SET_DECL_RTL (parm, stack_parm);
4877 else if (! ((! optimize
4878 && ! DECL_REGISTER (parm))
4879 || TREE_SIDE_EFFECTS (parm)
4880 /* If -ffloat-store specified, don't put explicit
4881 float variables into registers. */
4882 || (flag_float_store
4883 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4884 /* Always assign pseudo to structure return or item passed
4885 by invisible reference. */
4886 || passed_pointer || parm == function_result_decl)
4888 /* Store the parm in a pseudoregister during the function, but we
4889 may need to do it in a wider mode. */
4891 rtx parmreg;
4892 unsigned int regno, regnoi = 0, regnor = 0;
4894 unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
4896 promoted_nominal_mode
4897 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4899 parmreg = gen_reg_rtx (promoted_nominal_mode);
4900 mark_user_reg (parmreg);
4902 /* If this was an item that we received a pointer to, set DECL_RTL
4903 appropriately. */
4904 if (passed_pointer)
4906 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4907 parmreg);
4908 set_mem_attributes (x, parm, 1);
4909 SET_DECL_RTL (parm, x);
4911 else
4913 SET_DECL_RTL (parm, parmreg);
4914 maybe_set_unchanging (DECL_RTL (parm), parm);
4917 /* Copy the value into the register. */
4918 if (nominal_mode != passed_mode
4919 || promoted_nominal_mode != promoted_mode)
4921 int save_tree_used;
4922 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4923 mode, by the caller. We now have to convert it to
4924 NOMINAL_MODE, if different. However, PARMREG may be in
4925 a different mode than NOMINAL_MODE if it is being stored
4926 promoted.
4928 If ENTRY_PARM is a hard register, it might be in a register
4929 not valid for operating in its mode (e.g., an odd-numbered
4930 register for a DFmode). In that case, moves are the only
4931 thing valid, so we can't do a convert from there. This
4932 occurs when the calling sequence allows such misaligned
4933 usages.
4935 In addition, the conversion may involve a call, which could
4936 clobber parameters which haven't been copied to pseudo
4937 registers yet. Therefore, we must first copy the parm to
4938 a pseudo reg here, and save the conversion until after all
4939 parameters have been moved. */
4941 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4943 emit_move_insn (tempreg, validize_mem (entry_parm));
4945 push_to_sequence (conversion_insns);
4946 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4948 if (GET_CODE (tempreg) == SUBREG
4949 && GET_MODE (tempreg) == nominal_mode
4950 && REG_P (SUBREG_REG (tempreg))
4951 && nominal_mode == passed_mode
4952 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4953 && GET_MODE_SIZE (GET_MODE (tempreg))
4954 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4956 /* The argument is already sign/zero extended, so note it
4957 into the subreg. */
4958 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4959 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4962 /* TREE_USED gets set erroneously during expand_assignment. */
4963 save_tree_used = TREE_USED (parm);
4964 expand_assignment (parm,
4965 make_tree (nominal_type, tempreg), 0);
4966 TREE_USED (parm) = save_tree_used;
4967 conversion_insns = get_insns ();
4968 did_conversion = 1;
4969 end_sequence ();
4971 else
4972 emit_move_insn (parmreg, validize_mem (entry_parm));
4974 /* If we were passed a pointer but the actual value
4975 can safely live in a register, put it in one. */
4976 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4977 /* If by-reference argument was promoted, demote it. */
4978 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4979 || ! ((! optimize
4980 && ! DECL_REGISTER (parm))
4981 || TREE_SIDE_EFFECTS (parm)
4982 /* If -ffloat-store specified, don't put explicit
4983 float variables into registers. */
4984 || (flag_float_store
4985 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4987 /* We can't use nominal_mode, because it will have been set to
4988 Pmode above. We must use the actual mode of the parm. */
4989 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4990 mark_user_reg (parmreg);
4991 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4993 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4994 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
4995 push_to_sequence (conversion_insns);
4996 emit_move_insn (tempreg, DECL_RTL (parm));
4997 SET_DECL_RTL (parm,
4998 convert_to_mode (GET_MODE (parmreg),
4999 tempreg,
5000 unsigned_p));
5001 emit_move_insn (parmreg, DECL_RTL (parm));
5002 conversion_insns = get_insns();
5003 did_conversion = 1;
5004 end_sequence ();
5006 else
5007 emit_move_insn (parmreg, DECL_RTL (parm));
5008 SET_DECL_RTL (parm, parmreg);
5009 /* STACK_PARM is the pointer, not the parm, and PARMREG is
5010 now the parm. */
5011 stack_parm = 0;
5013 #ifdef FUNCTION_ARG_CALLEE_COPIES
5014 /* If we are passed an arg by reference and it is our responsibility
5015 to make a copy, do it now.
5016 PASSED_TYPE and PASSED mode now refer to the pointer, not the
5017 original argument, so we must recreate them in the call to
5018 FUNCTION_ARG_CALLEE_COPIES. */
5019 /* ??? Later add code to handle the case that if the argument isn't
5020 modified, don't do the copy. */
5022 else if (passed_pointer
5023 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
5024 TYPE_MODE (TREE_TYPE (passed_type)),
5025 TREE_TYPE (passed_type),
5026 named_arg)
5027 && ! TREE_ADDRESSABLE (TREE_TYPE (passed_type)))
5029 rtx copy;
5030 tree type = TREE_TYPE (passed_type);
5032 /* This sequence may involve a library call perhaps clobbering
5033 registers that haven't been copied to pseudos yet. */
5035 push_to_sequence (conversion_insns);
5037 if (!COMPLETE_TYPE_P (type)
5038 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5039 /* This is a variable sized object. */
5040 copy = gen_rtx_MEM (BLKmode,
5041 allocate_dynamic_stack_space
5042 (expr_size (parm), NULL_RTX,
5043 TYPE_ALIGN (type)));
5044 else
5045 copy = assign_stack_temp (TYPE_MODE (type),
5046 int_size_in_bytes (type), 1);
5047 set_mem_attributes (copy, parm, 1);
5049 store_expr (parm, copy, 0);
5050 emit_move_insn (parmreg, XEXP (copy, 0));
5051 conversion_insns = get_insns ();
5052 did_conversion = 1;
5053 end_sequence ();
5055 #endif /* FUNCTION_ARG_CALLEE_COPIES */
5057 /* In any case, record the parm's desired stack location
5058 in case we later discover it must live in the stack.
5060 If it is a COMPLEX value, store the stack location for both
5061 halves. */
5063 if (GET_CODE (parmreg) == CONCAT)
5064 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
5065 else
5066 regno = REGNO (parmreg);
5068 if (regno >= max_parm_reg)
5070 rtx *new;
5071 int old_max_parm_reg = max_parm_reg;
5073 /* It's slow to expand this one register at a time,
5074 but it's also rare and we need max_parm_reg to be
5075 precisely correct. */
5076 max_parm_reg = regno + 1;
5077 new = ggc_realloc (parm_reg_stack_loc,
5078 max_parm_reg * sizeof (rtx));
5079 memset (new + old_max_parm_reg, 0,
5080 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
5081 parm_reg_stack_loc = new;
5084 if (GET_CODE (parmreg) == CONCAT)
5086 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
5088 regnor = REGNO (gen_realpart (submode, parmreg));
5089 regnoi = REGNO (gen_imagpart (submode, parmreg));
5091 if (stack_parm != 0)
5093 parm_reg_stack_loc[regnor]
5094 = gen_realpart (submode, stack_parm);
5095 parm_reg_stack_loc[regnoi]
5096 = gen_imagpart (submode, stack_parm);
5098 else
5100 parm_reg_stack_loc[regnor] = 0;
5101 parm_reg_stack_loc[regnoi] = 0;
5104 else
5105 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
5107 /* Mark the register as eliminable if we did no conversion
5108 and it was copied from memory at a fixed offset,
5109 and the arg pointer was not copied to a pseudo-reg.
5110 If the arg pointer is a pseudo reg or the offset formed
5111 an invalid address, such memory-equivalences
5112 as we make here would screw up life analysis for it. */
5113 if (nominal_mode == passed_mode
5114 && ! did_conversion
5115 && stack_parm != 0
5116 && GET_CODE (stack_parm) == MEM
5117 && locate.offset.var == 0
5118 && reg_mentioned_p (virtual_incoming_args_rtx,
5119 XEXP (stack_parm, 0)))
5121 rtx linsn = get_last_insn ();
5122 rtx sinsn, set;
5124 /* Mark complex types separately. */
5125 if (GET_CODE (parmreg) == CONCAT)
5126 /* Scan backwards for the set of the real and
5127 imaginary parts. */
5128 for (sinsn = linsn; sinsn != 0;
5129 sinsn = prev_nonnote_insn (sinsn))
5131 set = single_set (sinsn);
5132 if (set != 0
5133 && SET_DEST (set) == regno_reg_rtx [regnoi])
5134 REG_NOTES (sinsn)
5135 = gen_rtx_EXPR_LIST (REG_EQUIV,
5136 parm_reg_stack_loc[regnoi],
5137 REG_NOTES (sinsn));
5138 else if (set != 0
5139 && SET_DEST (set) == regno_reg_rtx [regnor])
5140 REG_NOTES (sinsn)
5141 = gen_rtx_EXPR_LIST (REG_EQUIV,
5142 parm_reg_stack_loc[regnor],
5143 REG_NOTES (sinsn));
5145 else if ((set = single_set (linsn)) != 0
5146 && SET_DEST (set) == parmreg)
5147 REG_NOTES (linsn)
5148 = gen_rtx_EXPR_LIST (REG_EQUIV,
5149 stack_parm, REG_NOTES (linsn));
5152 /* For pointer data type, suggest pointer register. */
5153 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5154 mark_reg_pointer (parmreg,
5155 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5157 /* If something wants our address, try to use ADDRESSOF. */
5158 if (TREE_ADDRESSABLE (parm))
5160 /* If we end up putting something into the stack,
5161 fixup_var_refs_insns will need to make a pass over
5162 all the instructions. It looks through the pending
5163 sequences -- but it can't see the ones in the
5164 CONVERSION_INSNS, if they're not on the sequence
5165 stack. So, we go back to that sequence, just so that
5166 the fixups will happen. */
5167 push_to_sequence (conversion_insns);
5168 put_var_into_stack (parm, /*rescan=*/true);
5169 conversion_insns = get_insns ();
5170 end_sequence ();
5173 else
5175 /* Value must be stored in the stack slot STACK_PARM
5176 during function execution. */
5178 if (promoted_mode != nominal_mode)
5180 /* Conversion is required. */
5181 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5183 emit_move_insn (tempreg, validize_mem (entry_parm));
5185 push_to_sequence (conversion_insns);
5186 entry_parm = convert_to_mode (nominal_mode, tempreg,
5187 TYPE_UNSIGNED (TREE_TYPE (parm)));
5188 if (stack_parm)
5189 /* ??? This may need a big-endian conversion on sparc64. */
5190 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5192 conversion_insns = get_insns ();
5193 did_conversion = 1;
5194 end_sequence ();
5197 if (entry_parm != stack_parm)
5199 if (stack_parm == 0)
5201 stack_parm
5202 = assign_stack_local (GET_MODE (entry_parm),
5203 GET_MODE_SIZE (GET_MODE (entry_parm)),
5205 set_mem_attributes (stack_parm, parm, 1);
5208 if (promoted_mode != nominal_mode)
5210 push_to_sequence (conversion_insns);
5211 emit_move_insn (validize_mem (stack_parm),
5212 validize_mem (entry_parm));
5213 conversion_insns = get_insns ();
5214 end_sequence ();
5216 else
5217 emit_move_insn (validize_mem (stack_parm),
5218 validize_mem (entry_parm));
5221 SET_DECL_RTL (parm, stack_parm);
5225 if (targetm.calls.split_complex_arg && fnargs != orig_fnargs)
5227 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
5229 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
5230 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
5232 rtx tmp, real, imag;
5233 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
5235 real = DECL_RTL (fnargs);
5236 imag = DECL_RTL (TREE_CHAIN (fnargs));
5237 if (inner != GET_MODE (real))
5239 real = gen_lowpart_SUBREG (inner, real);
5240 imag = gen_lowpart_SUBREG (inner, imag);
5242 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5243 SET_DECL_RTL (parm, tmp);
5245 real = DECL_INCOMING_RTL (fnargs);
5246 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
5247 if (inner != GET_MODE (real))
5249 real = gen_lowpart_SUBREG (inner, real);
5250 imag = gen_lowpart_SUBREG (inner, imag);
5252 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5253 set_decl_incoming_rtl (parm, tmp);
5254 fnargs = TREE_CHAIN (fnargs);
5256 else
5258 SET_DECL_RTL (parm, DECL_RTL (fnargs));
5259 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
5261 /* Set MEM_EXPR to the original decl, i.e. to PARM,
5262 instead of the copy of decl, i.e. FNARGS. */
5263 if (DECL_INCOMING_RTL (parm)
5264 && GET_CODE (DECL_INCOMING_RTL (parm)) == MEM)
5265 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
5267 fnargs = TREE_CHAIN (fnargs);
5271 /* Output all parameter conversion instructions (possibly including calls)
5272 now that all parameters have been copied out of hard registers. */
5273 emit_insn (conversion_insns);
5275 /* If we are receiving a struct value address as the first argument, set up
5276 the RTL for the function result. As this might require code to convert
5277 the transmitted address to Pmode, we do this here to ensure that possible
5278 preliminary conversions of the address have been emitted already. */
5279 if (function_result_decl)
5281 tree result = DECL_RESULT (fndecl);
5282 rtx addr = DECL_RTL (function_result_decl);
5283 rtx x;
5285 addr = convert_memory_address (Pmode, addr);
5286 x = gen_rtx_MEM (DECL_MODE (result), addr);
5287 set_mem_attributes (x, result, 1);
5288 SET_DECL_RTL (result, x);
5291 last_parm_insn = get_last_insn ();
5293 /* We have aligned all the args, so add space for the pretend args. */
5294 stack_args_size.constant += extra_pretend_bytes;
5295 current_function_args_size = stack_args_size.constant;
5297 /* Adjust function incoming argument size for alignment and
5298 minimum length. */
5300 #ifdef REG_PARM_STACK_SPACE
5301 current_function_args_size = MAX (current_function_args_size,
5302 REG_PARM_STACK_SPACE (fndecl));
5303 #endif
5305 current_function_args_size
5306 = ((current_function_args_size + STACK_BYTES - 1)
5307 / STACK_BYTES) * STACK_BYTES;
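/* Worked example (illustrative only): with STACK_BYTES == 16, a raw
   incoming-argument size of 20 bytes is rounded up to 32 here.  */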
5309 #ifdef ARGS_GROW_DOWNWARD
5310 current_function_arg_offset_rtx
5311 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5312 : expand_expr (size_diffop (stack_args_size.var,
5313 size_int (-stack_args_size.constant)),
5314 NULL_RTX, VOIDmode, 0));
5315 #else
5316 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5317 #endif
5319 /* See how many bytes, if any, of its args a function should try to pop
5320 on return. */
5322 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5323 current_function_args_size);
5325 /* For stdarg.h function, save info about
5326 regs and stack space used by the named args. */
5328 current_function_args_info = args_so_far;
5330 /* Set the rtx used for the function return value. Put this in its
5331 own variable so any optimizers that need this information don't have
5332 to include tree.h. Do this here so it gets done when an inlined
5333 function gets output. */
5335 current_function_return_rtx
5336 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5337 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5339 /* If scalar return value was computed in a pseudo-reg, or was a named
5340 return value that got dumped to the stack, copy that to the hard
5341 return register. */
5342 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5344 tree decl_result = DECL_RESULT (fndecl);
5345 rtx decl_rtl = DECL_RTL (decl_result);
5347 if (REG_P (decl_rtl)
5348 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5349 : DECL_REGISTER (decl_result))
5351 rtx real_decl_rtl;
5353 #ifdef FUNCTION_OUTGOING_VALUE
5354 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5355 fndecl);
5356 #else
5357 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5358 fndecl);
5359 #endif
5360 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5361 /* The delay slot scheduler assumes that current_function_return_rtx
5362 holds the hard register containing the return value, not a
5363 temporary pseudo. */
5364 current_function_return_rtx = real_decl_rtl;
5369 /* If ARGS contains entries with complex types, split the entry into two
5370 entries of the component type. Return a new list if substitutions are
5371 needed, else the old list. */
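/* Illustrative example (not from the original source): when the target
   asks for splitting, a parameter declared "_Complex double x" is
   rewritten as two consecutive PARM_DECLs of type double, the real
   part followed by a synthetic decl for the imaginary part.  */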
5373 static tree
5374 split_complex_args (tree args)
5376 tree p;
5378 /* Before allocating memory, check for the common case of no complex. */
5379 for (p = args; p; p = TREE_CHAIN (p))
5381 tree type = TREE_TYPE (p);
5382 if (TREE_CODE (type) == COMPLEX_TYPE
5383 && targetm.calls.split_complex_arg (type))
5384 goto found;
5386 return args;
5388 found:
5389 args = copy_list (args);
5391 for (p = args; p; p = TREE_CHAIN (p))
5393 tree type = TREE_TYPE (p);
5394 if (TREE_CODE (type) == COMPLEX_TYPE
5395 && targetm.calls.split_complex_arg (type))
5397 tree decl;
5398 tree subtype = TREE_TYPE (type);
5400 /* Rewrite the PARM_DECL's type with its component. */
5401 TREE_TYPE (p) = subtype;
5402 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
5403 DECL_MODE (p) = VOIDmode;
5404 DECL_SIZE (p) = NULL;
5405 DECL_SIZE_UNIT (p) = NULL;
5406 layout_decl (p, 0);
5408 /* Build a second synthetic decl. */
5409 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
5410 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
5411 layout_decl (decl, 0);
5413 /* Splice it in; skip the new decl. */
5414 TREE_CHAIN (decl) = TREE_CHAIN (p);
5415 TREE_CHAIN (p) = decl;
5416 p = decl;
5420 return args;
5423 /* Indicate whether REGNO is an incoming argument to the current function
5424 that was promoted to a wider mode. If so, return the RTX for the
5425 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5426 that REGNO is promoted from and whether the promotion was signed or
5427 unsigned. */
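/* For example, on a hypothetical target whose PROMOTE_MODE widens HImode
   arguments to SImode, a `short' parameter has DECL_MODE HImode while its
   DECL_INCOMING_RTL is an SImode hard register.  For that register number
   this function returns the incoming register, with *PMODE set to HImode
   and *PUNSIGNEDP reflecting the signedness of `short'.  */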
5429 rtx
5430 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
5432 tree arg;
5434 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5435 arg = TREE_CHAIN (arg))
5436 if (REG_P (DECL_INCOMING_RTL (arg))
5437 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5438 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5440 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5441 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
5443 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5444 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5445 && mode != DECL_MODE (arg))
5447 *pmode = DECL_MODE (arg);
5448 *punsignedp = unsignedp;
5449 return DECL_INCOMING_RTL (arg);
5453 return 0;
5457 /* Compute the size and offset from the start of the stacked arguments for a
5458 parm passed in mode PASSED_MODE and with type TYPE.
5460 INITIAL_OFFSET_PTR points to the current offset into the stacked
5461 arguments.
5463 The starting offset and size for this parm are returned in
5464 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
5465 nonzero, the offset is that of the stack slot, which is returned in
5466 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
5467 padding required from the initial offset ptr to the stack slot.
5469 IN_REGS is nonzero if the argument will be passed in registers. It will
5470 never be set if REG_PARM_STACK_SPACE is not defined.
5472 FNDECL is the function in which the argument was defined.
5474 There are two types of rounding that are done. The first, controlled by
5475 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5476 list to be aligned to the specific boundary (in bits). This rounding
5477 affects the initial and starting offsets, but not the argument size.
5479 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5480 optionally rounds the size of the parm to PARM_BOUNDARY. The
5481 initial offset is not affected by this rounding, while the size always
5482 is and the starting offset may be. */
5484 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
5485 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
5486 callers pass in the total size of args so far as
5487 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
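/* As a worked example, assume a target where PARM_BOUNDARY is 32,
   FUNCTION_ARG_BOUNDARY returns 64 for this parm, STACK_POINTER_OFFSET is 0,
   args do not grow downward, no part of the parm is passed in registers,
   and the args so far occupy 20 bytes.  For a 1-byte QImode parm padded
   upward, the first rounding moves the starting offset from 20 up to 24
   (the next 64-bit boundary) and the second rounds the size from 1 up to
   4 bytes (PARM_BOUNDARY), leaving the starting offset at 24.  */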
5489 void
5490 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
5491 int partial, tree fndecl ATTRIBUTE_UNUSED,
5492 struct args_size *initial_offset_ptr,
5493 struct locate_and_pad_arg_data *locate)
5495 tree sizetree;
5496 enum direction where_pad;
5497 int boundary;
5498 int reg_parm_stack_space = 0;
5499 int part_size_in_regs;
5501 #ifdef REG_PARM_STACK_SPACE
5502 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5504 /* If we have found a stack parm before we reach the end of the
5505 area reserved for registers, skip that area. */
5506 if (! in_regs)
5508 if (reg_parm_stack_space > 0)
5510 if (initial_offset_ptr->var)
5512 initial_offset_ptr->var
5513 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5514 ssize_int (reg_parm_stack_space));
5515 initial_offset_ptr->constant = 0;
5517 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5518 initial_offset_ptr->constant = reg_parm_stack_space;
5521 #endif /* REG_PARM_STACK_SPACE */
5523 part_size_in_regs = 0;
5524 if (reg_parm_stack_space == 0)
5525 part_size_in_regs = ((partial * UNITS_PER_WORD)
5526 / (PARM_BOUNDARY / BITS_PER_UNIT)
5527 * (PARM_BOUNDARY / BITS_PER_UNIT));
5529 sizetree
5530 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5531 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5532 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5533 locate->where_pad = where_pad;
5535 #ifdef ARGS_GROW_DOWNWARD
5536 locate->slot_offset.constant = -initial_offset_ptr->constant;
5537 if (initial_offset_ptr->var)
5538 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
5539 initial_offset_ptr->var);
5542 tree s2 = sizetree;
5543 if (where_pad != none
5544 && (!host_integerp (sizetree, 1)
5545 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5546 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
5547 SUB_PARM_SIZE (locate->slot_offset, s2);
5550 locate->slot_offset.constant += part_size_in_regs;
5552 if (!in_regs
5553 #ifdef REG_PARM_STACK_SPACE
5554 || REG_PARM_STACK_SPACE (fndecl) > 0
5555 #endif
5557 pad_to_arg_alignment (&locate->slot_offset, boundary,
5558 &locate->alignment_pad);
5560 locate->size.constant = (-initial_offset_ptr->constant
5561 - locate->slot_offset.constant);
5562 if (initial_offset_ptr->var)
5563 locate->size.var = size_binop (MINUS_EXPR,
5564 size_binop (MINUS_EXPR,
5565 ssize_int (0),
5566 initial_offset_ptr->var),
5567 locate->slot_offset.var);
5569 /* Pad_below needs the pre-rounded size to know how much to pad
5570 below. */
5571 locate->offset = locate->slot_offset;
5572 if (where_pad == downward)
5573 pad_below (&locate->offset, passed_mode, sizetree);
5575 #else /* !ARGS_GROW_DOWNWARD */
5576 if (!in_regs
5577 #ifdef REG_PARM_STACK_SPACE
5578 || REG_PARM_STACK_SPACE (fndecl) > 0
5579 #endif
5581 pad_to_arg_alignment (initial_offset_ptr, boundary,
5582 &locate->alignment_pad);
5583 locate->slot_offset = *initial_offset_ptr;
5585 #ifdef PUSH_ROUNDING
5586 if (passed_mode != BLKmode)
5587 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5588 #endif
5590 /* Pad_below needs the pre-rounded size to know how much to pad below
5591 so this must be done before rounding up. */
5592 locate->offset = locate->slot_offset;
5593 if (where_pad == downward)
5594 pad_below (&locate->offset, passed_mode, sizetree);
5596 if (where_pad != none
5597 && (!host_integerp (sizetree, 1)
5598 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5599 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5601 ADD_PARM_SIZE (locate->size, sizetree);
5603 locate->size.constant -= part_size_in_regs;
5604 #endif /* ARGS_GROW_DOWNWARD */
5607 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5608 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
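/* E.g., with BOUNDARY of 64 bits, a constant offset of 20, and a
   STACK_POINTER_OFFSET of 0, CEIL_ROUND (20, 8) yields 24; the four bytes
   of padding are recorded in *ALIGNMENT_PAD when BOUNDARY exceeds both
   PARM_BOUNDARY and STACK_BOUNDARY.  */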
5610 static void
5611 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
5612 struct args_size *alignment_pad)
5614 tree save_var = NULL_TREE;
5615 HOST_WIDE_INT save_constant = 0;
5616 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5617 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
5619 #ifdef SPARC_STACK_BOUNDARY_HACK
5620 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
5621 higher than the real alignment of %sp. However, when it does this,
5622 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
5623 This is a temporary hack while the sparc port is fixed. */
5624 if (SPARC_STACK_BOUNDARY_HACK)
5625 sp_offset = 0;
5626 #endif
5628 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5630 save_var = offset_ptr->var;
5631 save_constant = offset_ptr->constant;
5634 alignment_pad->var = NULL_TREE;
5635 alignment_pad->constant = 0;
5637 if (boundary > BITS_PER_UNIT)
5639 if (offset_ptr->var)
5641 tree sp_offset_tree = ssize_int (sp_offset);
5642 tree offset = size_binop (PLUS_EXPR,
5643 ARGS_SIZE_TREE (*offset_ptr),
5644 sp_offset_tree);
5645 #ifdef ARGS_GROW_DOWNWARD
5646 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
5647 #else
5648 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
5649 #endif
5651 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
5652 /* ARGS_SIZE_TREE includes constant term. */
5653 offset_ptr->constant = 0;
5654 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5655 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5656 save_var);
5658 else
5660 offset_ptr->constant = -sp_offset +
5661 #ifdef ARGS_GROW_DOWNWARD
5662 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
5663 #else
5664 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
5665 #endif
5666 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5667 alignment_pad->constant = offset_ptr->constant - save_constant;
5672 static void
5673 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
5675 if (passed_mode != BLKmode)
5677 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5678 offset_ptr->constant
5679 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5680 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5681 - GET_MODE_SIZE (passed_mode));
5683 else
5685 if (TREE_CODE (sizetree) != INTEGER_CST
5686 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5688 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5689 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5690 /* Add it in. */
5691 ADD_PARM_SIZE (*offset_ptr, s2);
5692 SUB_PARM_SIZE (*offset_ptr, sizetree);
5697 /* Walk the tree of blocks describing the binding levels within a function
5698 and warn about variables that might be killed by setjmp or vfork.
5699 This is done after calling flow_analysis and before global_alloc
5700 clobbers the pseudo-regs to hard regs. */
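/* A typical case that triggers the warning (user code, for illustration):

       #include <setjmp.h>
       jmp_buf buf;
       extern void g (void);
       int f (void)
       {
         int i = 1;
         if (setjmp (buf))
           return i;          /* I may be clobbered here.  */
         i = 2;
         g ();                /* G may call longjmp (buf, 1).  */
         return 0;
       }

   If I is kept in a call-clobbered register across the call to setjmp,
   the value seen after a longjmp back into F is unpredictable.  */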
5702 void
5703 setjmp_vars_warning (tree block)
5705 tree decl, sub;
5707 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5709 if (TREE_CODE (decl) == VAR_DECL
5710 && DECL_RTL_SET_P (decl)
5711 && REG_P (DECL_RTL (decl))
5712 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5713 warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
5714 decl, decl);
5717 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5718 setjmp_vars_warning (sub);
5721 /* Do the appropriate part of setjmp_vars_warning
5722 but for arguments instead of local variables. */
5724 void
5725 setjmp_args_warning (void)
5727 tree decl;
5728 for (decl = DECL_ARGUMENTS (current_function_decl);
5729 decl; decl = TREE_CHAIN (decl))
5730 if (DECL_RTL (decl) != 0
5731 && REG_P (DECL_RTL (decl))
5732 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5733 warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
5734 decl, decl);
5737 /* If this function calls setjmp, put all vars into the stack
5738 unless they were declared `register'. */
5740 void
5741 setjmp_protect (tree block)
5743 tree decl, sub;
5744 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5745 if ((TREE_CODE (decl) == VAR_DECL
5746 || TREE_CODE (decl) == PARM_DECL)
5747 && DECL_RTL (decl) != 0
5748 && (REG_P (DECL_RTL (decl))
5749 || (GET_CODE (DECL_RTL (decl)) == MEM
5750 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5751 /* If this variable came from an inline function, it must be
5752 that its life doesn't overlap the setjmp. If there was a
5753 setjmp in the function, it would already be in memory. We
5754 must exclude such variables because their DECL_RTL might be
5755 set to strange things such as virtual_stack_vars_rtx. */
5756 && ! DECL_FROM_INLINE (decl)
5757 && (
5758 #ifdef NON_SAVING_SETJMP
5759 /* If longjmp doesn't restore the registers,
5760 don't put anything in them. */
5761 NON_SAVING_SETJMP
5763 #endif
5764 ! DECL_REGISTER (decl)))
5765 put_var_into_stack (decl, /*rescan=*/true);
5766 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5767 setjmp_protect (sub);
5770 /* Like the previous function, but for args instead of local variables. */
5772 void
5773 setjmp_protect_args (void)
5775 tree decl;
5776 for (decl = DECL_ARGUMENTS (current_function_decl);
5777 decl; decl = TREE_CHAIN (decl))
5778 if ((TREE_CODE (decl) == VAR_DECL
5779 || TREE_CODE (decl) == PARM_DECL)
5780 && DECL_RTL (decl) != 0
5781 && (REG_P (DECL_RTL (decl))
5782 || (GET_CODE (DECL_RTL (decl)) == MEM
5783 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5784 && (
5785 /* If longjmp doesn't restore the registers,
5786 don't put anything in them. */
5787 #ifdef NON_SAVING_SETJMP
5788 NON_SAVING_SETJMP
5790 #endif
5791 ! DECL_REGISTER (decl)))
5792 put_var_into_stack (decl, /*rescan=*/true);
5795 /* Convert a stack slot address ADDR for variable VAR
5796 (from a containing function)
5797 into an address valid in this function (using a static chain). */
5799 rtx
5800 fix_lexical_addr (rtx addr, tree var)
5802 rtx basereg;
5803 HOST_WIDE_INT displacement;
5804 tree context = decl_function_context (var);
5805 struct function *fp;
5806 rtx base = 0;
5808 /* If this is the present function, we need not do anything. */
5809 if (context == current_function_decl)
5810 return addr;
5812 fp = find_function_data (context);
5814 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5815 addr = XEXP (XEXP (addr, 0), 0);
5817 /* Decode given address as base reg plus displacement. */
5818 if (REG_P (addr))
5819 basereg = addr, displacement = 0;
5820 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5821 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5822 else
5823 abort ();
5825 if (base == 0)
5826 abort ();
5828 /* Use same offset, relative to appropriate static chain or argument
5829 pointer. */
5830 return plus_constant (base, displacement);
5833 /* Put all this function's BLOCK nodes including those that are chained
5834 onto the first block into a vector, in depth-first order.
5835 Then walk the insn chain and store in each NOTE for the beginning or
5836 end of a block the BLOCK that the note corresponds to.
5837 The block tree is taken from DECL_INITIAL of the current function and
5838 the insn chain from get_insns. */
5840 void
5841 identify_blocks (void)
5843 int n_blocks;
5844 tree *block_vector, *last_block_vector;
5845 tree *block_stack;
5846 tree block = DECL_INITIAL (current_function_decl);
5848 if (block == 0)
5849 return;
5851 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5852 depth-first order. */
5853 block_vector = get_block_vector (block, &n_blocks);
5854 block_stack = xmalloc (n_blocks * sizeof (tree));
5856 last_block_vector = identify_blocks_1 (get_insns (),
5857 block_vector + 1,
5858 block_vector + n_blocks,
5859 block_stack);
5861 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5862 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5863 if (0 && last_block_vector != block_vector + n_blocks)
5864 abort ();
5866 free (block_vector);
5867 free (block_stack);
5870 /* Subroutine of identify_blocks. Do the block substitution on the
5871 insn chain beginning with INSNS.
5873 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5874 BLOCK_VECTOR is incremented for each block seen. */
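/* For instance, given the note sequence BEG1 BEG2 END2 END1, the blocks
   are consumed from BLOCK_VECTOR in order (block 1, then block 2), and
   BLOCK_STACK guarantees that END2 is paired with block 2 and END1 with
   block 1.  */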
5876 static tree *
5877 identify_blocks_1 (rtx insns, tree *block_vector, tree *end_block_vector,
5878 tree *orig_block_stack)
5880 rtx insn;
5881 tree *block_stack = orig_block_stack;
5883 for (insn = insns; insn; insn = NEXT_INSN (insn))
5885 if (GET_CODE (insn) == NOTE)
5887 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5889 tree b;
5891 /* If there are more block notes than BLOCKs, something
5892 is badly wrong. */
5893 if (block_vector == end_block_vector)
5894 abort ();
5896 b = *block_vector++;
5897 NOTE_BLOCK (insn) = b;
5898 *block_stack++ = b;
5900 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5902 /* If there are more NOTE_INSN_BLOCK_ENDs than
5903 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5904 if (block_stack == orig_block_stack)
5905 abort ();
5907 NOTE_BLOCK (insn) = *--block_stack;
5912 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5913 something is badly wrong. */
5914 if (block_stack != orig_block_stack)
5915 abort ();
5917 return block_vector;
5920 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5921 and create duplicate blocks. */
5922 /* ??? Need an option to either create block fragments or to create
5923 abstract origin duplicates of a source block. It really depends
5924 on what optimization has been performed. */
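/* For instance, if scheduling or cross-jumping has split the insns of a
   lexical block B into two discontiguous regions, the second
   NOTE_INSN_BLOCK_BEG referring to B receives a freshly copied BLOCK whose
   BLOCK_FRAGMENT_ORIGIN points back at B, and the copy is linked onto B's
   BLOCK_FRAGMENT_CHAIN.  */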
5926 void
5927 reorder_blocks (void)
5929 tree block = DECL_INITIAL (current_function_decl);
5930 varray_type block_stack;
5932 if (block == NULL_TREE)
5933 return;
5935 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5937 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5938 clear_block_marks (block);
5940 /* Prune the old trees away, so that they don't get in the way. */
5941 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5942 BLOCK_CHAIN (block) = NULL_TREE;
5944 /* Recreate the block tree from the note nesting. */
5945 reorder_blocks_1 (get_insns (), block, &block_stack);
5946 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5948 /* Remove deleted blocks from the block fragment chains. */
5949 reorder_fix_fragments (block);
5952 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5954 void
5955 clear_block_marks (tree block)
5957 while (block)
5959 TREE_ASM_WRITTEN (block) = 0;
5960 clear_block_marks (BLOCK_SUBBLOCKS (block));
5961 block = BLOCK_CHAIN (block);
5965 static void
5966 reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
5968 rtx insn;
5970 for (insn = insns; insn; insn = NEXT_INSN (insn))
5972 if (GET_CODE (insn) == NOTE)
5974 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5976 tree block = NOTE_BLOCK (insn);
5978 /* If we have seen this block before, that means it now
5979 spans multiple address regions. Create a new fragment. */
5980 if (TREE_ASM_WRITTEN (block))
5982 tree new_block = copy_node (block);
5983 tree origin;
5985 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5986 ? BLOCK_FRAGMENT_ORIGIN (block)
5987 : block);
5988 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5989 BLOCK_FRAGMENT_CHAIN (new_block)
5990 = BLOCK_FRAGMENT_CHAIN (origin);
5991 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5993 NOTE_BLOCK (insn) = new_block;
5994 block = new_block;
5997 BLOCK_SUBBLOCKS (block) = 0;
5998 TREE_ASM_WRITTEN (block) = 1;
5999 /* When there's only one block for the entire function,
6000 current_block == block and we mustn't do this; it
6001 would cause infinite recursion. */
6002 if (block != current_block)
6004 BLOCK_SUPERCONTEXT (block) = current_block;
6005 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
6006 BLOCK_SUBBLOCKS (current_block) = block;
6007 current_block = block;
6009 VARRAY_PUSH_TREE (*p_block_stack, block);
6011 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6013 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
6014 VARRAY_POP (*p_block_stack);
6015 BLOCK_SUBBLOCKS (current_block)
6016 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
6017 current_block = BLOCK_SUPERCONTEXT (current_block);
6023 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
6024 appears in the block tree, select one of the fragments to become
6025 the new origin block. */
6027 static void
6028 reorder_fix_fragments (tree block)
6030 while (block)
6032 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6033 tree new_origin = NULL_TREE;
6035 if (dup_origin)
6037 if (! TREE_ASM_WRITTEN (dup_origin))
6039 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6041 /* Find the first of the remaining fragments. There must
6042 be at least one -- the current block. */
6043 while (! TREE_ASM_WRITTEN (new_origin))
6044 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6045 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6048 else if (! dup_origin)
6049 new_origin = block;
6051 /* Re-root the rest of the fragments to the new origin. In the
6052 case that DUP_ORIGIN was null, that means BLOCK was the origin
6053 of a chain of fragments and we want to remove those fragments
6054 that didn't make it to the output. */
6055 if (new_origin)
6057 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6058 tree chain = *pp;
6060 while (chain)
6062 if (TREE_ASM_WRITTEN (chain))
6064 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6065 *pp = chain;
6066 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6068 chain = BLOCK_FRAGMENT_CHAIN (chain);
6070 *pp = NULL_TREE;
6073 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6074 block = BLOCK_CHAIN (block);
6078 /* Reverse the order of elements in the chain T of blocks,
6079 and return the new head of the chain (old last element). */
6081 tree
6082 blocks_nreverse (tree t)
6084 tree prev = 0, decl, next;
6085 for (decl = t; decl; decl = next)
6087 next = BLOCK_CHAIN (decl);
6088 BLOCK_CHAIN (decl) = prev;
6089 prev = decl;
6091 return prev;
6094 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6095 non-NULL, list them all into VECTOR, in a depth-first preorder
6096 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6097 blocks. */
6099 static int
6100 all_blocks (tree block, tree *vector)
6102 int n_blocks = 0;
6104 while (block)
6106 TREE_ASM_WRITTEN (block) = 0;
6108 /* Record this block. */
6109 if (vector)
6110 vector[n_blocks] = block;
6112 ++n_blocks;
6114 /* Record the subblocks, and their subblocks... */
6115 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6116 vector ? vector + n_blocks : 0);
6117 block = BLOCK_CHAIN (block);
6120 return n_blocks;
6123 /* Return a vector containing all the blocks rooted at BLOCK. The
6124 number of elements in the vector is stored in N_BLOCKS_P. The
6125 vector is dynamically allocated; it is the caller's responsibility
6126 to call `free' on the pointer returned. */
6128 static tree *
6129 get_block_vector (tree block, int *n_blocks_p)
6131 tree *block_vector;
6133 *n_blocks_p = all_blocks (block, NULL);
6134 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
6135 all_blocks (block, block_vector);
6137 return block_vector;
6140 static GTY(()) int next_block_index = 2;
6142 /* Set BLOCK_NUMBER for all the blocks in FN. */
6144 void
6145 number_blocks (tree fn)
6147 int i;
6148 int n_blocks;
6149 tree *block_vector;
6151 /* For SDB and XCOFF debugging output, we start numbering the blocks
6152 from 1 within each function, rather than keeping a running
6153 count. */
6154 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6155 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6156 next_block_index = 1;
6157 #endif
6159 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6161 /* The top-level BLOCK isn't numbered at all. */
6162 for (i = 1; i < n_blocks; ++i)
6163 /* We number the blocks from two. */
6164 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6166 free (block_vector);
6168 return;
6171 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6173 tree
6174 debug_find_var_in_block_tree (tree var, tree block)
6176 tree t;
6178 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6179 if (t == var)
6180 return block;
6182 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6184 tree ret = debug_find_var_in_block_tree (var, t);
6185 if (ret)
6186 return ret;
6189 return NULL_TREE;
6192 /* Allocate a function structure for FNDECL and set its contents
6193 to the defaults. */
6195 void
6196 allocate_struct_function (tree fndecl)
6198 tree result;
6199 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6201 cfun = ggc_alloc_cleared (sizeof (struct function));
6203 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6205 cfun->stack_alignment_needed = STACK_BOUNDARY;
6206 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6208 current_function_funcdef_no = funcdef_no++;
6210 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6212 init_stmt_for_function ();
6213 init_eh_for_function ();
6215 lang_hooks.function.init (cfun);
6216 if (init_machine_status)
6217 cfun->machine = (*init_machine_status) ();
6219 if (fndecl == NULL)
6220 return;
6222 DECL_STRUCT_FUNCTION (fndecl) = cfun;
6223 cfun->decl = fndecl;
6225 result = DECL_RESULT (fndecl);
6226 if (aggregate_value_p (result, fndecl))
6228 #ifdef PCC_STATIC_STRUCT_RETURN
6229 current_function_returns_pcc_struct = 1;
6230 #endif
6231 current_function_returns_struct = 1;
6234 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
6236 current_function_stdarg
6237 = (fntype
6238 && TYPE_ARG_TYPES (fntype) != 0
6239 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6240 != void_type_node));
6243 /* Reset cfun, and other non-struct-function variables to defaults as
6244 appropriate for emitting rtl at the start of a function. */
6246 static void
6247 prepare_function_start (tree fndecl)
6249 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
6250 cfun = DECL_STRUCT_FUNCTION (fndecl);
6251 else
6252 allocate_struct_function (fndecl);
6253 init_emit ();
6254 init_varasm_status (cfun);
6255 init_expr ();
6257 cse_not_expected = ! optimize;
6259 /* Caller save not needed yet. */
6260 caller_save_needed = 0;
6262 /* We haven't done register allocation yet. */
6263 reg_renumber = 0;
6265 /* Indicate that we need to distinguish between the return value of the
6266 present function and the return value of a function being called. */
6267 rtx_equal_function_value_matters = 1;
6269 /* Indicate that we have not instantiated virtual registers yet. */
6270 virtuals_instantiated = 0;
6272 /* Indicate that we want CONCATs now. */
6273 generating_concat_p = 1;
6275 /* Indicate we have no need of a frame pointer yet. */
6276 frame_pointer_needed = 0;
6279 /* Initialize the rtl expansion mechanism so that we can do simple things
6280 like generate sequences. This is used to provide a context during global
6281 initialization of some passes. */
6282 void
6283 init_dummy_function_start (void)
6285 prepare_function_start (NULL);
6288 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6289 and initialize static variables for generating RTL for the statements
6290 of the function. */
6292 void
6293 init_function_start (tree subr)
6295 prepare_function_start (subr);
6297 /* Within the function body, compute a type's size as soon as it is laid out. */
6298 immediate_size_expand++;
6300 /* Prevent ever trying to delete the first instruction of a
6301 function. Also tell final how to output a linenum before the
6302 function prologue. Note linenums could be missing, e.g. when
6303 compiling a Java .class file. */
6304 if (DECL_SOURCE_LINE (subr))
6305 emit_line_note (DECL_SOURCE_LOCATION (subr));
6307 /* Make sure first insn is a note even if we don't want linenums.
6308 This makes sure the first insn will never be deleted.
6309 Also, final expects a note to appear there. */
6310 emit_note (NOTE_INSN_DELETED);
6312 /* Warn if this value is an aggregate type,
6313 regardless of which calling convention we are using for it. */
6314 if (warn_aggregate_return
6315 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6316 warning ("function returns an aggregate");
6319 /* Make sure all values used by the optimization passes have sane
6320 defaults. */
6321 void
6322 init_function_for_compilation (void)
6324 reg_renumber = 0;
6326 /* No prologue/epilogue insns yet. */
6327 VARRAY_GROW (prologue, 0);
6328 VARRAY_GROW (epilogue, 0);
6329 VARRAY_GROW (sibcall_epilogue, 0);
6332 /* Expand a call to __main at the beginning of a possible main function. */
6334 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6335 #undef HAS_INIT_SECTION
6336 #define HAS_INIT_SECTION
6337 #endif
6339 void
6340 expand_main_function (void)
6342 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6343 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6345 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6346 rtx tmp, seq;
6348 start_sequence ();
6349 /* Forcibly align the stack. */
6350 #ifdef STACK_GROWS_DOWNWARD
6351 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
6352 stack_pointer_rtx, 1, OPTAB_WIDEN);
6353 #else
6354 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6355 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6356 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6357 stack_pointer_rtx, 1, OPTAB_WIDEN);
6358 #endif
6359 if (tmp != stack_pointer_rtx)
6360 emit_move_insn (stack_pointer_rtx, tmp);
6362 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6363 tmp = force_reg (Pmode, const0_rtx);
6364 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6365 seq = get_insns ();
6366 end_sequence ();
6368 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6369 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6370 break;
6371 if (tmp)
6372 emit_insn_before (seq, tmp);
6373 else
6374 emit_insn (seq);
6376 #endif
6378 #ifndef HAS_INIT_SECTION
6379 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
6380 #endif
6383 /* The PENDING_SIZES represent the sizes of variable-sized types.
6384 Create RTL for the various sizes now (using temporary variables),
6385 so that we can refer to the sizes from the RTL we are generating
6386 for the current function. The PENDING_SIZES are a TREE_LIST. The
6387 TREE_VALUE of each node is a SAVE_EXPR. */
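/* For example, for a declaration such as

       void f (int n, int a[n][n]);

   the expression computing the size of A's type is wrapped in a SAVE_EXPR
   and recorded on the pending-sizes list; expanding it here makes that
   size available to the RTL generated for the function body.  */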
6389 void
6390 expand_pending_sizes (tree pending_sizes)
6392 tree tem;
6394 /* Evaluate now the sizes of any types declared among the arguments. */
6395 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6397 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6398 /* Flush the queue in case this parameter declaration has
6399 side-effects. */
6400 emit_queue ();
6404 /* Start the RTL for a new function, and set variables used for
6405 emitting RTL.
6406 SUBR is the FUNCTION_DECL node.
6407 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6408 the function's parameters, which must be run at any return statement. */
6410 void
6411 expand_function_start (tree subr, int parms_have_cleanups)
6413 /* Make sure volatile mem refs aren't considered
6414 valid operands of arithmetic insns. */
6415 init_recog_no_volatile ();
6417 current_function_profile
6418 = (profile_flag
6419 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6421 current_function_limit_stack
6422 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6424 /* If the parameters of this function need cleaning up, get a label
6425 for the beginning of the code which executes those cleanups. This must
6426 be done before doing anything with return_label. */
6427 if (parms_have_cleanups)
6428 cleanup_label = gen_label_rtx ();
6429 else
6430 cleanup_label = 0;
6432 /* Make the label for return statements to jump to. Do not special
6433 case machines with special return instructions -- they will be
6434 handled later during jump, ifcvt, or epilogue creation. */
6435 return_label = gen_label_rtx ();
6437 /* Initialize rtx used to return the value. */
6438 /* Do this before assign_parms so that we copy the struct value address
6439 before any library calls that assign parms might generate. */
6441 /* Decide whether to return the value in memory or in a register. */
6442 if (aggregate_value_p (DECL_RESULT (subr), subr))
6444 /* Returning something that won't go in a register. */
6445 rtx value_address = 0;
6447 #ifdef PCC_STATIC_STRUCT_RETURN
6448 if (current_function_returns_pcc_struct)
6450 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6451 value_address = assemble_static_space (size);
6453 else
6454 #endif
6456 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
6457 /* Expect to be passed the address of a place to store the value.
6458 If it is passed as an argument, assign_parms will take care of
6459 it. */
6460 if (sv)
6462 value_address = gen_reg_rtx (Pmode);
6463 emit_move_insn (value_address, sv);
6466 if (value_address)
6468 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6469 set_mem_attributes (x, DECL_RESULT (subr), 1);
6470 SET_DECL_RTL (DECL_RESULT (subr), x);
6473 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6474 /* If return mode is void, this decl rtl should not be used. */
6475 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6476 else
6478 /* Compute the return values into a pseudo reg, which we will copy
6479 into the true return register after the cleanups are done. */
6481 /* In order to figure out what mode to use for the pseudo, we
6482 figure out what the mode of the eventual return register will
6483 actually be, and use that. */
6484 rtx hard_reg
6485 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6486 subr, 1);
6488 /* Structures that are returned in registers are not aggregate_value_p,
6489 so we may see a PARALLEL or a REG. */
6490 if (REG_P (hard_reg))
6491 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6492 else if (GET_CODE (hard_reg) == PARALLEL)
6493 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
6494 else
6495 abort ();
6497 /* Set DECL_REGISTER flag so that expand_function_end will copy the
6498 result to the real return register(s). */
6499 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6502 /* Initialize rtx for parameters and local variables.
6503 In some cases this requires emitting insns. */
6504 assign_parms (subr);
6506 /* If function gets a static chain arg, store it. */
6507 if (cfun->static_chain_decl)
6509 tree parm = cfun->static_chain_decl;
6510 rtx local = gen_reg_rtx (Pmode);
6512 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
6513 SET_DECL_RTL (parm, local);
6514 maybe_set_unchanging (local, parm);
6515 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6517 emit_move_insn (local, static_chain_incoming_rtx);
6520 /* If the function receives a non-local goto, then store the
6521 bits we need to restore the frame pointer. */
6522 if (cfun->nonlocal_goto_save_area)
6524 tree t_save;
6525 rtx r_save;
6527 /* ??? We need to do this save early. Unfortunately, this point is
6528 before the frame variable gets declared. Help out... */
6529 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
6531 t_save = build (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
6532 integer_zero_node);
6533 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
6535 emit_move_insn (r_save, virtual_stack_vars_rtx);
6536 update_nonlocal_goto_save_area ();
6539 /* The following was moved from init_function_start.
6540 The move is supposed to make sdb output more accurate. */
6541 /* Indicate the beginning of the function body,
6542 as opposed to parm setup. */
6543 emit_note (NOTE_INSN_FUNCTION_BEG);
6545 if (GET_CODE (get_last_insn ()) != NOTE)
6546 emit_note (NOTE_INSN_DELETED);
6547 parm_birth_insn = get_last_insn ();
6549 if (current_function_profile)
6551 #ifdef PROFILE_HOOK
6552 PROFILE_HOOK (current_function_funcdef_no);
6553 #endif
6556 /* After the display initializations is where the tail-recursion label
6557 should go, if we end up needing one. Ensure we have a NOTE here
6558 since some things (like trampolines) get placed before this. */
6559 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
6561 /* Evaluate now the sizes of any types declared among the arguments. */
6562 expand_pending_sizes (nreverse (get_pending_sizes ()));
6564 /* Make sure there is a line number after the function entry setup code. */
6565 force_next_line_note ();
6568 /* Undo the effects of init_dummy_function_start. */
6569 void
6570 expand_dummy_function_end (void)
6572 /* End any sequences that failed to be closed due to syntax errors. */
6573 while (in_sequence_p ())
6574 end_sequence ();
6576 /* Outside function body, can't compute type's actual size
6577 until next function's body starts. */
6579 free_after_parsing (cfun);
6580 free_after_compilation (cfun);
6581 cfun = 0;
6584 /* Call DOIT for each hard register used as a return value from
6585 the current function. */
6587 void
6588 diddle_return_value (void (*doit) (rtx, void *), void *arg)
6590 rtx outgoing = current_function_return_rtx;
6592 if (! outgoing)
6593 return;
6595 if (REG_P (outgoing))
6596 (*doit) (outgoing, arg);
6597 else if (GET_CODE (outgoing) == PARALLEL)
6599 int i;
6601 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6603 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6605 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6606 (*doit) (x, arg);
6611 static void
6612 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
6614 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6617 void
6618 clobber_return_register (void)
6620 diddle_return_value (do_clobber_return_reg, NULL);
6622 /* In case we do use pseudo to return value, clobber it too. */
6623 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6625 tree decl_result = DECL_RESULT (current_function_decl);
6626 rtx decl_rtl = DECL_RTL (decl_result);
6627 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6629 do_clobber_return_reg (decl_rtl, NULL);
6634 static void
6635 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
6637 emit_insn (gen_rtx_USE (VOIDmode, reg));
6640 void
6641 use_return_register (void)
6643 diddle_return_value (do_use_return_reg, NULL);
6646 /* Possibly warn about unused parameters. */
6647 void
6648 do_warn_unused_parameter (tree fn)
6650 tree decl;
6652 for (decl = DECL_ARGUMENTS (fn);
6653 decl; decl = TREE_CHAIN (decl))
6654 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6655 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
6656 warning ("%Junused parameter '%D'", decl, decl);
6659 static GTY(()) rtx initial_trampoline;
6661 /* Generate RTL for the end of the current function. */
6663 void
6664 expand_function_end (void)
6666 rtx clobber_after;
6668 finish_expr_for_function ();
6670 /* If arg_pointer_save_area was referenced only from a nested
6671 function, we will not have initialized it yet. Do that now. */
6672 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6673 get_arg_pointer_save_area (cfun);
6675 #ifdef NON_SAVING_SETJMP
6676 /* Don't put any variables in registers if we call setjmp
6677 on a machine that fails to restore the registers. */
6678 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6680 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6681 setjmp_protect (DECL_INITIAL (current_function_decl));
6683 setjmp_protect_args ();
6685 #endif
6687 /* If we are doing stack checking and this function makes calls,
6688 do a stack probe at the start of the function to ensure we have enough
6689 space for another stack frame. */
6690 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6692 rtx insn, seq;
6694 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6695 if (GET_CODE (insn) == CALL_INSN)
6697 start_sequence ();
6698 probe_stack_range (STACK_CHECK_PROTECT,
6699 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6700 seq = get_insns ();
6701 end_sequence ();
6702 emit_insn_before (seq, tail_recursion_reentry);
6703 break;
6707 /* Possibly warn about unused parameters.
6708 When the frontend does unit-at-a-time, the warning is already
6709 issued at finalization time. */
6710 if (warn_unused_parameter
6711 && !lang_hooks.callgraph.expand_function)
6712 do_warn_unused_parameter (current_function_decl);
6714 /* End any sequences that failed to be closed due to syntax errors. */
6715 while (in_sequence_p ())
6716 end_sequence ();
6718 /* Outside function body, can't compute type's actual size
6719 until next function's body starts. */
6720 immediate_size_expand--;
6722 clear_pending_stack_adjust ();
6723 do_pending_stack_adjust ();
6725 /* @@@ This is a kludge. We want to ensure that instructions that
6726 may trap are not moved into the epilogue by scheduling, because
6727 we don't always emit unwind information for the epilogue.
6728 However, not all machine descriptions define a blockage insn, so
6729 emit an ASM_INPUT to act as one. */
6730 if (flag_non_call_exceptions)
6731 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
6733 /* Mark the end of the function body.
6734 If control reaches this insn, the function can drop through
6735 without returning a value. */
6736 emit_note (NOTE_INSN_FUNCTION_END);
6738 /* Must mark the last line number note in the function, so that the test
6739 coverage code can avoid counting the last line twice. This just tells
6740 the code to ignore the immediately following line note, since there
6741 already exists a copy of this note somewhere above. This line number
6742 note is still needed for debugging though, so we can't delete it. */
6743 if (flag_test_coverage)
6744 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
6746 /* Output a linenumber for the end of the function.
6747 SDB depends on this. */
6748 force_next_line_note ();
6749 emit_line_note (input_location);
6751 /* Before the return label (if any), clobber the return
6752 registers so that they are not propagated live to the rest of
6753 the function. This can only happen with functions that drop
6754 through; if there had been a return statement, there would
6755 have either been a return rtx, or a jump to the return label.
6757 We delay actual code generation until after current_function_return_rtx
6758 is computed. */
6759 clobber_after = get_last_insn ();
6761 /* Output the label for the actual return from the function,
6762 if one is expected. This happens either because a function epilogue
6763 is used instead of a return instruction, or because a return was done
6764 with a goto in order to run local cleanups, or because of pcc-style
6765 structure returning. */
6766 if (return_label)
6767 emit_label (return_label);
6769 /* Let except.c know where it should emit the call to unregister
6770 the function context for sjlj exceptions. */
6771 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6772 sjlj_emit_function_exit_after (get_last_insn ());
6774 /* If we had calls to alloca, and this machine needs
6775 an accurate stack pointer to exit the function,
6776 insert some code to save and restore the stack pointer. */
6777 if (! EXIT_IGNORE_STACK
6778 && current_function_calls_alloca)
6780 rtx tem = 0;
6782 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6783 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6786 /* If scalar return value was computed in a pseudo-reg, or was a named
6787 return value that got dumped to the stack, copy that to the hard
6788 return register. */
6789 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6791 tree decl_result = DECL_RESULT (current_function_decl);
6792 rtx decl_rtl = DECL_RTL (decl_result);
6794 if (REG_P (decl_rtl)
6795 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6796 : DECL_REGISTER (decl_result))
6798 rtx real_decl_rtl = current_function_return_rtx;
6800 /* This should be set in assign_parms. */
6801 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
6802 abort ();
6804 /* If this is a BLKmode structure being returned in registers,
6805 then use the mode computed in expand_return. Note that if
6806 decl_rtl is memory, then its mode may have been changed,
6807 but that current_function_return_rtx has not. */
6808 if (GET_MODE (real_decl_rtl) == BLKmode)
6809 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
6811 /* If a named return value dumped decl_result to memory, then
6812 we may need to re-do the PROMOTE_MODE signed/unsigned
6813 extension. */
6814 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6816 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
6818 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
6819 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6820 &unsignedp, 1);
6822 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6824 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6826 /* If expand_function_start has created a PARALLEL for decl_rtl,
6827 move the result to the real return registers. Otherwise, do
6828 a group load from decl_rtl for a named return. */
6829 if (GET_CODE (decl_rtl) == PARALLEL)
6830 emit_group_move (real_decl_rtl, decl_rtl);
6831 else
6832 emit_group_load (real_decl_rtl, decl_rtl,
6833 TREE_TYPE (decl_result),
6834 int_size_in_bytes (TREE_TYPE (decl_result)));
6836 else
6837 emit_move_insn (real_decl_rtl, decl_rtl);
6841 /* If returning a structure, arrange to return the address of the value
6842 in a place where debuggers expect to find it.
6844 If returning a structure PCC style,
6845 the caller also depends on this value.
6846 And current_function_returns_pcc_struct is not necessarily set. */
6847 if (current_function_returns_struct
6848 || current_function_returns_pcc_struct)
6850 rtx value_address
6851 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6852 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6853 #ifdef FUNCTION_OUTGOING_VALUE
6854 rtx outgoing
6855 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6856 current_function_decl);
6857 #else
6858 rtx outgoing
6859 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
6860 #endif
6862 /* Mark this as a function return value so integrate will delete the
6863 assignment and USE below when inlining this function. */
6864 REG_FUNCTION_VALUE_P (outgoing) = 1;
6866 /* The address may be ptr_mode and OUTGOING may be Pmode. */
6867 value_address = convert_memory_address (GET_MODE (outgoing),
6868 value_address);
6870 emit_move_insn (outgoing, value_address);
6872 /* Show the return register used to hold the result (in this case the
6873 address of the result). */
6874 current_function_return_rtx = outgoing;
6877 /* If this is an implementation of throw, do what's necessary to
6878 communicate between __builtin_eh_return and the epilogue. */
6879 expand_eh_return ();
6881 /* Emit the actual code to clobber return register. */
6883 rtx seq, after;
6885 start_sequence ();
6886 clobber_return_register ();
6887 seq = get_insns ();
6888 end_sequence ();
6890 after = emit_insn_after (seq, clobber_after);
6893 /* Output the label for the naked return from the function, if one is
6894 expected. This is currently used only by __builtin_return. */
6895 if (naked_return_label)
6896 emit_label (naked_return_label);
6898 /* ??? This should no longer be necessary since stupid is no longer with
6899 us, but there are some parts of the compiler (eg reload_combine, and
6900 sh mach_dep_reorg) that still try and compute their own lifetime info
6901 instead of using the general framework. */
6902 use_return_register ();
6904 /* Fix up any gotos that jumped out to the outermost
6905 binding level of the function.
6906 Must follow emitting RETURN_LABEL. */
6908 /* If you have any cleanups to do at this point,
6909 and they need to create temporary variables,
6910 then you will lose. */
6911 expand_fixups (get_insns ());
6914 rtx
6915 get_arg_pointer_save_area (struct function *f)
6917 rtx ret = f->x_arg_pointer_save_area;
6919 if (! ret)
6921 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
6922 f->x_arg_pointer_save_area = ret;
6925 if (f == cfun && ! f->arg_pointer_save_area_init)
6927 rtx seq;
6929 /* Save the arg pointer at the beginning of the function. The
6930 generated stack slot may not be a valid memory address, so we
6931 have to check it and fix it if necessary. */
6932 start_sequence ();
6933 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
6934 seq = get_insns ();
6935 end_sequence ();
6937 push_topmost_sequence ();
6938 emit_insn_after (seq, get_insns ());
6939 pop_topmost_sequence ();
6942 return ret;
6945 /* Extend a vector that records the INSN_UIDs of INSNS
6946 (a list of one or more insns). */
6948 static void
6949 record_insns (rtx insns, varray_type *vecp)
6951 int i, len;
6952 rtx tmp;
6954 tmp = insns;
6955 len = 0;
6956 while (tmp != NULL_RTX)
6958 len++;
6959 tmp = NEXT_INSN (tmp);
6962 i = VARRAY_SIZE (*vecp);
6963 VARRAY_GROW (*vecp, i + len);
6964 tmp = insns;
6965 while (tmp != NULL_RTX)
6967 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
6968 i++;
6969 tmp = NEXT_INSN (tmp);
6973 /* Set the locator of the insn chain starting at INSN to LOC. */
6974 static void
6975 set_insn_locators (rtx insn, int loc)
6977 while (insn != NULL_RTX)
6979 if (INSN_P (insn))
6980 INSN_LOCATOR (insn) = loc;
6981 insn = NEXT_INSN (insn);
6985 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
6986 be running after reorg, SEQUENCE rtl is possible. */
6988 static int
6989 contains (rtx insn, varray_type vec)
6991 int i, j;
6993 if (GET_CODE (insn) == INSN
6994 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6996 int count = 0;
6997 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6998 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6999 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7000 count++;
7001 return count;
7003 else
7005 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7006 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7007 return 1;
7009 return 0;
7012 int
7013 prologue_epilogue_contains (rtx insn)
7015 if (contains (insn, prologue))
7016 return 1;
7017 if (contains (insn, epilogue))
7018 return 1;
7019 return 0;
7022 int
7023 sibcall_epilogue_contains (rtx insn)
7025 if (sibcall_epilogue)
7026 return contains (insn, sibcall_epilogue);
7027 return 0;
7030 #ifdef HAVE_return
7031 /* Insert gen_return at the end of block BB. This also means updating
7032 block_for_insn appropriately. */
7034 static void
7035 emit_return_into_block (basic_block bb, rtx line_note)
7037 emit_jump_insn_after (gen_return (), BB_END (bb));
7038 if (line_note)
7039 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
7041 #endif /* HAVE_return */
7043 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7045 /* These functions convert the epilogue into a variant that does not modify the
7046 stack pointer. This is used in cases where a function returns an object
7047 whose size is not known until it is computed. The called function leaves the
7048 object on the stack, leaves the stack depressed, and returns a pointer to
7049 the object.
7051 What we need to do is track all modifications and references to the stack
7052 pointer, deleting the modifications and changing the references to point to
7053 the location the stack pointer would have pointed to had the modifications
7054 taken place.
7056 These functions need to be portable so we need to make as few assumptions
7057 about the epilogue as we can. However, the epilogue basically contains
7058 three things: instructions to reset the stack pointer, instructions to
7059 reload registers, possibly including the frame pointer, and an
7060 instruction to return to the caller.
7062 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7063 We also make no attempt to validate the insns we make since if they are
7064 invalid, we probably can't do anything valid. The intent is that these
7065 routines get "smarter" as more and more machines start to use them and
7066 they try operating on different epilogues.
7068 We use the following structure to track what the part of the epilogue that
7069 we've already processed has done. We keep two copies of the SP equivalence,
7070 one for use during the insn we are processing and one for use in the next
7071 insn. The difference is because one part of a PARALLEL may adjust SP
7072 and the other may use it. */
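/* For example, if the epilogue begins with (set sp (plus sp (const_int 16))),
   that adjustment is not re-emitted; instead the code records that the
   current SP equals the incoming SP plus 16 (SP_EQUIV_REG == sp,
   SP_OFFSET == 16), and later references to SP are rewritten to use
   sp + 16 in their place.  */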
7074 struct epi_info
7076 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7077 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7078 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7079 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7080 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7081 should be set to once we no longer need
7082 its value. */
7083 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
7084 for registers. */
7087 static void handle_epilogue_set (rtx, struct epi_info *);
7088 static void update_epilogue_consts (rtx, rtx, void *);
7089 static void emit_equiv_load (struct epi_info *);
7091 /* Modify INSNS, a list of one or more insns that is part of the epilogue,
7092 to make no modifications to the stack pointer. Return the new list of insns. */
7094 static rtx
7095 keep_stack_depressed (rtx insns)
7097 int j;
7098 struct epi_info info;
7099 rtx insn, next;
7101 /* If the epilogue is just a single instruction, it must be OK as is. */
7102 if (NEXT_INSN (insns) == NULL_RTX)
7103 return insns;
7105 /* Otherwise, start a sequence, initialize the information we have, and
7106 process all the insns we were given. */
7107 start_sequence ();
7109 info.sp_equiv_reg = stack_pointer_rtx;
7110 info.sp_offset = 0;
7111 info.equiv_reg_src = 0;
7113 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
7114 info.const_equiv[j] = 0;
7116 insn = insns;
7117 next = NULL_RTX;
7118 while (insn != NULL_RTX)
7120 next = NEXT_INSN (insn);
7122 if (!INSN_P (insn))
7124 add_insn (insn);
7125 insn = next;
7126 continue;
7129 /* If this insn references the register that SP is equivalent to and
7130 we have a pending load to that register, we must force out the load
7131 first and then indicate we no longer know what SP's equivalent is. */
7132 if (info.equiv_reg_src != 0
7133 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7135 emit_equiv_load (&info);
7136 info.sp_equiv_reg = 0;
7139 info.new_sp_equiv_reg = info.sp_equiv_reg;
7140 info.new_sp_offset = info.sp_offset;
7142 /* If this is a (RETURN) and the return address is on the stack,
7143 update the address and change to an indirect jump. */
7144 if (GET_CODE (PATTERN (insn)) == RETURN
7145 || (GET_CODE (PATTERN (insn)) == PARALLEL
7146 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7148 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7149 rtx base = 0;
7150 HOST_WIDE_INT offset = 0;
7151 rtx jump_insn, jump_set;
7153 /* If the return address is in a register, we can emit the insn
7154 unchanged. Otherwise, it must be a MEM and we see what the
7155 base register and offset are. In any case, we have to emit any
7156 pending load to the equivalent reg of SP, if any. */
7157 if (REG_P (retaddr))
7159 emit_equiv_load (&info);
7160 add_insn (insn);
7161 insn = next;
7162 continue;
7164 else if (GET_CODE (retaddr) == MEM
7165 && REG_P (XEXP (retaddr, 0)))
7166 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7167 else if (GET_CODE (retaddr) == MEM
7168 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7169 && REG_P (XEXP (XEXP (retaddr, 0), 0))
7170 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7172 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7173 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7175 else
7176 abort ();
7178 /* If the base of the location containing the return pointer
7179 is SP, we must update it with the replacement address. Otherwise,
7180 just build the necessary MEM. */
7181 retaddr = plus_constant (base, offset);
7182 if (base == stack_pointer_rtx)
7183 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7184 plus_constant (info.sp_equiv_reg,
7185 info.sp_offset));
7187 retaddr = gen_rtx_MEM (Pmode, retaddr);
7189 /* If there is a pending load to the equivalent register for SP
7190 and we reference that register, we must load our address into
7191 a scratch register and then do that load. */
7192 if (info.equiv_reg_src
7193 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7195 unsigned int regno;
7196 rtx reg;
7198 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7199 if (HARD_REGNO_MODE_OK (regno, Pmode)
7200 && !fixed_regs[regno]
7201 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7202 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7203 regno)
7204 && !refers_to_regno_p (regno,
7205 regno + hard_regno_nregs[regno]
7206 [Pmode],
7207 info.equiv_reg_src, NULL)
7208 && info.const_equiv[regno] == 0)
7209 break;
7211 if (regno == FIRST_PSEUDO_REGISTER)
7212 abort ();
7214 reg = gen_rtx_REG (Pmode, regno);
7215 emit_move_insn (reg, retaddr);
7216 retaddr = reg;
7219 emit_equiv_load (&info);
7220 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7222 /* Show the SET in the above insn is a RETURN. */
7223 jump_set = single_set (jump_insn);
7224 if (jump_set == 0)
7225 abort ();
7226 else
7227 SET_IS_RETURN_P (jump_set) = 1;
7230 /* If SP is not mentioned in the pattern and its equivalent register, if
7231 any, is not modified, just emit it. Otherwise, if neither is set,
7232 replace the reference to SP and emit the insn. If none of those are
7233 true, handle each SET individually. */
7234 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7235 && (info.sp_equiv_reg == stack_pointer_rtx
7236 || !reg_set_p (info.sp_equiv_reg, insn)))
7237 add_insn (insn);
7238 else if (! reg_set_p (stack_pointer_rtx, insn)
7239 && (info.sp_equiv_reg == stack_pointer_rtx
7240 || !reg_set_p (info.sp_equiv_reg, insn)))
7242 if (! validate_replace_rtx (stack_pointer_rtx,
7243 plus_constant (info.sp_equiv_reg,
7244 info.sp_offset),
7245 insn))
7246 abort ();
7248 add_insn (insn);
7250 else if (GET_CODE (PATTERN (insn)) == SET)
7251 handle_epilogue_set (PATTERN (insn), &info);
7252 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7254 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7255 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7256 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7258 else
7259 add_insn (insn);
7261 info.sp_equiv_reg = info.new_sp_equiv_reg;
7262 info.sp_offset = info.new_sp_offset;
7264 /* Now update any constants this insn sets. */
7265 note_stores (PATTERN (insn), update_epilogue_consts, &info);
7266 insn = next;
7269 insns = get_insns ();
7270 end_sequence ();
7271 return insns;
7274 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
7275 structure that contains information about what we've seen so far. We
7276 process this SET by either updating that data or by emitting one or
7277 more insns. */
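   /* Illustration only (not taken from the code below): a SET such as
      (set (reg sp) (plus (reg fp) (const_int -16))) records FP as the new
      SP-equivalent register with offset -16, while a SET whose destination
      involves neither SP nor its equivalent simply has any SP references
      rewritten to the equivalent register plus offset and is emitted.  */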
7279 static void
7280 handle_epilogue_set (rtx set, struct epi_info *p)
7282 /* First handle the case where we are setting SP. Record what it is being
7283 set from. If unknown, abort. */
7284 if (reg_set_p (stack_pointer_rtx, set))
7286 if (SET_DEST (set) != stack_pointer_rtx)
7287 abort ();
7289 if (GET_CODE (SET_SRC (set)) == PLUS)
7291 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
7292 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
7293 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
7294 else if (REG_P (XEXP (SET_SRC (set), 1))
7295 && REGNO (XEXP (SET_SRC (set), 1)) < FIRST_PSEUDO_REGISTER
7296 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))] != 0)
7297 p->new_sp_offset
7298 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
7299 else
7300 abort ();
7302 else
7303 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
7305 /* If we are adjusting SP, we adjust from the old data. */
7306 if (p->new_sp_equiv_reg == stack_pointer_rtx)
7308 p->new_sp_equiv_reg = p->sp_equiv_reg;
7309 p->new_sp_offset += p->sp_offset;
7312 if (p->new_sp_equiv_reg == 0 || !REG_P (p->new_sp_equiv_reg))
7313 abort ();
7315 return;
7318 /* Next handle the case where we are setting SP's equivalent register.
7319 If we already have a value to set it to, abort. We could update, but
7320 there seems little point in handling that case. Note that we have
 7321 to allow for the case where we are setting the register that was set
 7322 in an earlier part of a PARALLEL within this same insn, but we use the
 7323 old offset for any updates within this insn. We must also allow for the
 7324 case where the register is being set in a different (usually wider) mode
 7325 than Pmode.  */
7326 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
7328 if (p->equiv_reg_src != 0
7329 || !REG_P (p->new_sp_equiv_reg)
7330 || !REG_P (SET_DEST (set))
7331 || GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) > BITS_PER_WORD
7332 || REGNO (p->new_sp_equiv_reg) != REGNO (SET_DEST (set)))
7333 abort ();
7334 else
7335 p->equiv_reg_src
7336 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7337 plus_constant (p->sp_equiv_reg,
7338 p->sp_offset));
7341 /* Otherwise, replace any references to SP in the insn to its new value
7342 and emit the insn. */
7343 else
7345 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7346 plus_constant (p->sp_equiv_reg,
7347 p->sp_offset));
7348 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
7349 plus_constant (p->sp_equiv_reg,
7350 p->sp_offset));
7351 emit_insn (set);
7355 /* Update the tracking information for registers set to constants. */
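 /* For example (illustrative only): after (set (reg r1) (const_int 8)) we
    record 8 for r1, and a later (set (reg r1) (plus (reg r1) (const_int 4)))
    folds to a recorded value of 12; a CLOBBER of r1, or a partial set of it,
    drops the record.  */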
7357 static void
7358 update_epilogue_consts (rtx dest, rtx x, void *data)
7360 struct epi_info *p = (struct epi_info *) data;
7361 rtx new;
7363 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
7364 return;
7366 /* If we are either clobbering a register or doing a partial set,
7367 show we don't know the value. */
7368 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
7369 p->const_equiv[REGNO (dest)] = 0;
7371 /* If we are setting it to a constant, record that constant. */
7372 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
7373 p->const_equiv[REGNO (dest)] = SET_SRC (x);
7375 /* If this is a binary operation between a register we have been tracking
7376 and a constant, see if we can compute a new constant value. */
7377 else if (ARITHMETIC_P (SET_SRC (x))
7378 && REG_P (XEXP (SET_SRC (x), 0))
7379 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
7380 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
7381 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
7382 && 0 != (new = simplify_binary_operation
7383 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
7384 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
7385 XEXP (SET_SRC (x), 1)))
7386 && GET_CODE (new) == CONST_INT)
7387 p->const_equiv[REGNO (dest)] = new;
7389 /* Otherwise, we can't do anything with this value. */
7390 else
7391 p->const_equiv[REGNO (dest)] = 0;
7394 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
7396 static void
7397 emit_equiv_load (struct epi_info *p)
7399 if (p->equiv_reg_src != 0)
7401 rtx dest = p->sp_equiv_reg;
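       /* If the pending value was computed in a different (typically wider)
	  mode than the SP-equivalent register, re-create the destination
	  register in that mode so the move below has matching modes.  */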
7403 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
7404 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
7405 REGNO (p->sp_equiv_reg));
7407 emit_move_insn (dest, p->equiv_reg_src);
7408 p->equiv_reg_src = 0;
7411 #endif
7413 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7414 this into place with notes indicating where the prologue ends and where
7415 the epilogue begins. Update the basic block information when possible. */
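 /* In outline (a summary of the code below): the prologue, if any, is
    inserted on the lone successor edge of the entry block; when HAVE_return,
    jumps to an empty exit-fallthru block may be turned into (conditional)
    return insns; the epilogue, if any, is inserted on the fallthru edge to
    the exit block; and sibcall epilogues are emitted just before sibling
    call insns.  */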
7417 void
7418 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
7420 int inserted = 0;
7421 edge e;
7422 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7423 rtx seq;
7424 #endif
7425 #ifdef HAVE_prologue
7426 rtx prologue_end = NULL_RTX;
7427 #endif
7428 #if defined (HAVE_epilogue) || defined(HAVE_return)
7429 rtx epilogue_end = NULL_RTX;
7430 #endif
7432 #ifdef HAVE_prologue
7433 if (HAVE_prologue)
7435 start_sequence ();
7436 seq = gen_prologue ();
7437 emit_insn (seq);
7439 /* Retain a map of the prologue insns. */
7440 record_insns (seq, &prologue);
7441 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
7443 seq = get_insns ();
7444 end_sequence ();
7445 set_insn_locators (seq, prologue_locator);
 7447 /* We can't deal with multiple successors of the entry block at the
 7448 moment. A function should always have at least one entry
 7449 point. */
7450 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7451 abort ();
7453 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7454 inserted = 1;
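       /* The prologue sequence is only queued on the edge here;
	  commit_edge_insertions below splices it into the insn stream.  */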
7456 #endif
7458 /* If the exit block has no non-fake predecessors, we don't need
7459 an epilogue. */
7460 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7461 if ((e->flags & EDGE_FAKE) == 0)
7462 break;
7463 if (e == NULL)
7464 goto epilogue_done;
7466 #ifdef HAVE_return
7467 if (optimize && HAVE_return)
7469 /* If we're allowed to generate a simple return instruction,
7470 then by definition we don't need a full epilogue. Examine
7471 the block that falls through to EXIT. If it does not
7472 contain any code, examine its predecessors and try to
7473 emit (conditional) return instructions. */
7475 basic_block last;
7476 edge e_next;
7477 rtx label;
7479 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7480 if (e->flags & EDGE_FALLTHRU)
7481 break;
7482 if (e == NULL)
7483 goto epilogue_done;
7484 last = e->src;
7486 /* Verify that there are no active instructions in the last block. */
7487 label = BB_END (last);
7488 while (label && GET_CODE (label) != CODE_LABEL)
7490 if (active_insn_p (label))
7491 break;
7492 label = PREV_INSN (label);
7495 if (BB_HEAD (last) == label && GET_CODE (label) == CODE_LABEL)
7497 rtx epilogue_line_note = NULL_RTX;
7499 /* Locate the line number associated with the closing brace,
7500 if we can find one. */
7501 for (seq = get_last_insn ();
7502 seq && ! active_insn_p (seq);
7503 seq = PREV_INSN (seq))
7504 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7506 epilogue_line_note = seq;
7507 break;
7510 for (e = last->pred; e; e = e_next)
7512 basic_block bb = e->src;
7513 rtx jump;
7515 e_next = e->pred_next;
7516 if (bb == ENTRY_BLOCK_PTR)
7517 continue;
7519 jump = BB_END (bb);
7520 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7521 continue;
7523 /* If we have an unconditional jump, we can replace that
7524 with a simple return instruction. */
7525 if (simplejump_p (jump))
7527 emit_return_into_block (bb, epilogue_line_note);
7528 delete_insn (jump);
7531 /* If we have a conditional jump, we can try to replace
7532 that with a conditional return instruction. */
7533 else if (condjump_p (jump))
7535 if (! redirect_jump (jump, 0, 0))
7536 continue;
7538 /* If this block has only one successor, it both jumps
7539 and falls through to the fallthru block, so we can't
7540 delete the edge. */
7541 if (bb->succ->succ_next == NULL)
7542 continue;
7544 else
7545 continue;
7547 /* Fix up the CFG for the successful change we just made. */
7548 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7551 /* Emit a return insn for the exit fallthru block. Whether
7552 this is still reachable will be determined later. */
7554 emit_barrier_after (BB_END (last));
7555 emit_return_into_block (last, epilogue_line_note);
7556 epilogue_end = BB_END (last);
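       /* The block now ends in a return insn followed by a barrier, so its
	  edge to the exit block is no longer a fallthru edge.  */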
7557 last->succ->flags &= ~EDGE_FALLTHRU;
7558 goto epilogue_done;
7561 #endif
7562 /* Find the edge that falls through to EXIT. Other edges may exist
7563 due to RETURN instructions, but those don't need epilogues.
7564 There really shouldn't be a mixture -- either all should have
7565 been converted or none, however... */
7567 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7568 if (e->flags & EDGE_FALLTHRU)
7569 break;
7570 if (e == NULL)
7571 goto epilogue_done;
7573 #ifdef HAVE_epilogue
7574 if (HAVE_epilogue)
7576 start_sequence ();
7577 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
7579 seq = gen_epilogue ();
7581 #ifdef INCOMING_RETURN_ADDR_RTX
7582 /* If this function returns with the stack depressed and we can support
7583 it, massage the epilogue to actually do that. */
7584 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7585 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7586 seq = keep_stack_depressed (seq);
7587 #endif
7589 emit_jump_insn (seq);
7591 /* Retain a map of the epilogue insns. */
7592 record_insns (seq, &epilogue);
7593 set_insn_locators (seq, epilogue_locator);
7595 seq = get_insns ();
7596 end_sequence ();
7598 insert_insn_on_edge (seq, e);
7599 inserted = 1;
7601 else
7602 #endif
7604 basic_block cur_bb;
7606 if (! next_active_insn (BB_END (e->src)))
7607 goto epilogue_done;
7608 /* We have a fall-through edge to the exit block, the source is not
7609 at the end of the function, and there will be an assembler epilogue
7610 at the end of the function.
7611 We can't use force_nonfallthru here, because that would try to
7612 use return. Inserting a jump 'by hand' is extremely messy, so
7613 we take advantage of cfg_layout_finalize using
7614 fixup_fallthru_exit_predecessor. */
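       /* Link each block to its current successor (skipping the exit block)
	  so that cfg_layout_finalize keeps the present order while inserting
	  the jump needed for the fallthru predecessor of the exit block.  */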
7615 cfg_layout_initialize ();
7616 FOR_EACH_BB (cur_bb)
7617 if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
7618 cur_bb->rbi->next = cur_bb->next_bb;
7619 cfg_layout_finalize ();
7621 epilogue_done:
7623 if (inserted)
7624 commit_edge_insertions ();
7626 #ifdef HAVE_sibcall_epilogue
7627 /* Emit sibling epilogues before any sibling call sites. */
7628 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7630 basic_block bb = e->src;
7631 rtx insn = BB_END (bb);
7632 rtx i;
7633 rtx newinsn;
7635 if (GET_CODE (insn) != CALL_INSN
7636 || ! SIBLING_CALL_P (insn))
7637 continue;
7639 start_sequence ();
7640 emit_insn (gen_sibcall_epilogue ());
7641 seq = get_insns ();
7642 end_sequence ();
7644 /* Retain a map of the epilogue insns. Used in life analysis to
7645 avoid getting rid of sibcall epilogue insns. Do this before we
7646 actually emit the sequence. */
7647 record_insns (seq, &sibcall_epilogue);
7648 set_insn_locators (seq, epilogue_locator);
7650 i = PREV_INSN (insn);
7651 newinsn = emit_insn_before (seq, insn);
7653 #endif
7655 #ifdef HAVE_prologue
7656 /* This is probably all useless now that we use locators. */
7657 if (prologue_end)
7659 rtx insn, prev;
7661 /* GDB handles `break f' by setting a breakpoint on the first
7662 line note after the prologue. Which means (1) that if
7663 there are line number notes before where we inserted the
7664 prologue we should move them, and (2) we should generate a
7665 note before the end of the first basic block, if there isn't
7666 one already there.
 7668 ??? This behavior is completely broken when dealing with
 7669 multiple entry functions. We simply always place the note
 7670 in the first basic block and let alternate entry points
 7671 be missed.  */
7674 for (insn = prologue_end; insn; insn = prev)
7676 prev = PREV_INSN (insn);
7677 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7679 /* Note that we cannot reorder the first insn in the
7680 chain, since rest_of_compilation relies on that
7681 remaining constant. */
7682 if (prev == NULL)
7683 break;
7684 reorder_insns (insn, insn, prologue_end);
7688 /* Find the last line number note in the first block. */
7689 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
7690 insn != prologue_end && insn;
7691 insn = PREV_INSN (insn))
7692 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7693 break;
7695 /* If we didn't find one, make a copy of the first line number
7696 we run across. */
7697 if (! insn)
7699 for (insn = next_active_insn (prologue_end);
7700 insn;
7701 insn = PREV_INSN (insn))
7702 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7704 emit_note_copy_after (insn, prologue_end);
7705 break;
7709 #endif
7710 #ifdef HAVE_epilogue
7711 if (epilogue_end)
7713 rtx insn, next;
7715 /* Similarly, move any line notes that appear after the epilogue.
7716 There is no need, however, to be quite so anal about the existence
7717 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
7718 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
7719 info generation. */
7720 for (insn = epilogue_end; insn; insn = next)
7722 next = NEXT_INSN (insn);
7723 if (GET_CODE (insn) == NOTE
7724 && (NOTE_LINE_NUMBER (insn) > 0
7725 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
7726 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
7727 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7730 #endif
7733 /* Reposition the prologue-end and epilogue-begin notes after instruction
7734 scheduling and delayed branch scheduling. */
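 /* In outline: using the insn maps recorded in the prologue and epilogue
    varrays, the NOTE_INSN_PROLOGUE_END note is moved to just after the last
    prologue insn, and the NOTE_INSN_EPILOGUE_BEG note to just before the
    first epilogue insn.  */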
7736 void
7737 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
7739 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7740 rtx insn, last, note;
7741 int len;
7743 if ((len = VARRAY_SIZE (prologue)) > 0)
7745 last = 0, note = 0;
7747 /* Scan from the beginning until we reach the last prologue insn.
7748 We apparently can't depend on basic_block_{head,end} after
7749 reorg has run. */
7750 for (insn = f; insn; insn = NEXT_INSN (insn))
7752 if (GET_CODE (insn) == NOTE)
7754 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7755 note = insn;
7757 else if (contains (insn, prologue))
7759 last = insn;
7760 if (--len == 0)
7761 break;
7765 if (last)
7767 /* Find the prologue-end note if we haven't already, and
7768 move it to just after the last prologue insn. */
7769 if (note == 0)
7771 for (note = last; (note = NEXT_INSN (note));)
7772 if (GET_CODE (note) == NOTE
7773 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7774 break;
7777 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
7778 if (GET_CODE (last) == CODE_LABEL)
7779 last = NEXT_INSN (last);
7780 reorder_insns (note, note, last);
7784 if ((len = VARRAY_SIZE (epilogue)) > 0)
7786 last = 0, note = 0;
7788 /* Scan from the end until we reach the first epilogue insn.
7789 We apparently can't depend on basic_block_{head,end} after
7790 reorg has run. */
7791 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7793 if (GET_CODE (insn) == NOTE)
7795 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7796 note = insn;
7798 else if (contains (insn, epilogue))
7800 last = insn;
7801 if (--len == 0)
7802 break;
7806 if (last)
7808 /* Find the epilogue-begin note if we haven't already, and
7809 move it to just before the first epilogue insn. */
7810 if (note == 0)
7812 for (note = insn; (note = PREV_INSN (note));)
7813 if (GET_CODE (note) == NOTE
7814 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7815 break;
7818 if (PREV_INSN (last) != note)
7819 reorder_insns (note, note, PREV_INSN (last));
7822 #endif /* HAVE_prologue or HAVE_epilogue */
7825 /* Called once, at initialization, to initialize function.c. */
7827 void
7828 init_function_once (void)
7830 VARRAY_INT_INIT (prologue, 0, "prologue");
7831 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7832 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
 7835 /* Resets the cfun->ib_boundaries_block array, which maps insn UIDs to blocks. */
7837 void
7838 reset_block_changes (void)
7840 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
7841 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
7844 /* Record the boundary for BLOCK. */
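 /* The top element of ib_boundaries_block tracks the block that is current
    while insns are generated: it is popped, every insn UID created since the
    previous call is assigned that block, and then BLOCK is pushed as the new
    current block.  */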
7845 void
7846 record_block_change (tree block)
7848 int i, n;
7849 tree last_block;
7851 if (!block)
7852 return;
7854 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
7855 VARRAY_POP (cfun->ib_boundaries_block);
7856 n = get_max_uid ();
7857 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
7858 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
7860 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
 7863 /* Finishes recording the block boundaries. */
7864 void finalize_block_changes (void)
7866 record_block_change (DECL_INITIAL (current_function_decl));
 7869 /* For INSN, store in *BLOCK the block it belongs to. */
7870 void
7871 check_block_change (rtx insn, tree *block)
7873 unsigned uid = INSN_UID (insn);
7875 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
7876 return;
7878 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
7881 /* Releases the ib_boundaries_block records. */
7882 void
7883 free_block_changes (void)
7885 cfun->ib_boundaries_block = NULL;
7888 /* Returns the name of the current function. */
7889 const char *
7890 current_function_name (void)
7892 return lang_hooks.decl_printable_name (cfun->decl, 2);
7895 #include "gt-function.h"