/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc.
   In these cases, use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the greatest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
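
/* For example, with ALIGN == 16 (a power of two):
     FLOOR_ROUND (37, 16) == 32    CEIL_ROUND (37, 16) == 48
     FLOOR_ROUND (-37, 16) == -48  CEIL_ROUND (-37, 16) == -32
   whereas (-37 / 16) * 16 gives -32 with division that truncates
   toward zero, which is why the masking form is used for possibly
   negative frame offsets.  */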
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */

int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;
/* Assign unique numbers to labels generated for profiling.  */
static int profile_label_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));
/* This variable holds a pointer to a function to register any
   data items in the target specific, per-function data structure
   that will need garbage collection.  */
void (*mark_machine_status) PARAMS ((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PARAMS ((struct function *));
void (*save_lang_status) PARAMS ((struct function *));
void (*restore_lang_status) PARAMS ((struct function *));
void (*mark_lang_status) PARAMS ((struct function *));
void (*free_lang_status) PARAMS ((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static varray_type prologue;
static varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
                                         int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
                                        enum machine_mode, enum machine_mode,
                                        int, unsigned int, int,
                                        struct hash_table *));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
                                             enum machine_mode,
                                             struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
                                    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
                                          int, int, rtx));
static void fixup_var_refs_insns_with_hash
  PARAMS ((struct hash_table *, rtx,
           enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
                                         int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
                                      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
                                             int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
                                          struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
                               tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
                                       struct hash_table *));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
                                                         struct hash_table *,
                                                         hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_function_status PARAMS ((struct function *));
static void maybe_mark_struct_function PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));

/* Pointer to chain of `struct function' for containing functions.  */
static struct function *outer_function_chain;
/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  if (save_lang_status)
    (*save_lang_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
                    p->fixup_var_refs_queue->promoted_mode,
                    p->fixup_var_refs_queue->unsignedp,
                    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        fixup_var_refs (queue->modified, queue->promoted_mode,
                        queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
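
/* For example, on a target where FRAME_GROWS_DOWNWARD is defined,
   allocating a 16-byte slot leaves x_frame_offset at -16 and
   get_frame_size () reports 16; on other targets the offset itself
   grows upward and is returned unnegated.  */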
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore any alignment we can't satisfy given the expected alignment
     of the stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset
    = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
      + frame_phase;
#else
  function->x_frame_offset
    = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
      + frame_phase;
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
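
/* A typical call (illustrative only) allocates a word-sized slot for
   the current function:

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   where ALIGN == 0 requests alignment according to SImode.  */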
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = TYPE_READONLY (type);
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}
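
/* Illustrative sketch of the KEEP argument: a caller expanding a
   TARGET_EXPR would typically request

     rtx t = assign_stack_temp_for_type (mode, size, 2, type);

   so that the slot lives until the enclosing CLEANUP_POINT_EXPR,
   whereas KEEP == 0 gives a slot freed by the next call to
   free_temp_slots.  */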
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error_with_decl (decl, "size of variable `%s' is too large");
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
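
/* For instance (illustrative), expanding a structure-returning call
   might grab a temporary with

     rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   forcing stack memory (MEMORY_REQUIRED == 1) and suppressing mode
   promotion (DONT_PROMOTE == 1).  */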
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
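
/* Example of the adjacency test above: a free BLKmode slot with
   base_offset 0 and full_size 16 merges with a free BLKmode slot at
   base_offset 16, yielding one reusable 32-byte slot.  With
   -fstrict-aliasing the merge is skipped entirely to keep alias sets
   exact.  */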
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
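
/* For example, once virtual register instantiation rewrites an address
   like (plus (reg virtual-stack-vars) (const_int 8)) into its hard
   frame-pointer form, update_temp_slot_address is told about the new
   RTX so find_temp_slot_from_address keeps working on either form.  */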
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
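
/* Example: in a statement expression such as

     x = ({ struct S s = f (); g (&s); s; });

   the value of the ({...}) may live in a temporary slot; expanding the
   grouping calls preserve_temp_slots on the result so the slot is not
   freed along with the statement that created it.  */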
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
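
/* The usual pairing (illustrative) around expanding one statement:

     push_temp_slots ();
     ... expand the expression ...
     preserve_temp_slots (result);   (only if the value must survive)
     free_temp_slots ();
     pop_temp_slots ();

   This matches the level accounting above: frees affect only slots
   allocated at the current temp_slot_level.  */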
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
               || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
         ? SAVE_EXPR_RTL (decl)
         : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
                            decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fix up references to the parts only after we fix up references
         to the whole CONCAT, lest we do double fixups for the latter
         references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = type_for_mode (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating
         already-computed alias sets.  Here we want to re-generate them.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;
}
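
/* Illustrative trigger: given

     int x;  int *p = &x;

   if x was initially assigned a pseudo register, the front end calls
   put_var_into_stack on the decl of x upon seeing the address-of, and
   references to the pseudo are rewritten to a stack MEM (or an
   ADDRESSOF when can_use_addressof holds).  */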
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for the cse and loop optimizers.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
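
/* Note the in-place surgery above: an existing (reg:SI N) shared by
   many insns is mutated with PUT_CODE/PUT_MODE into something like
   (mem:SI (plus (reg virtual-stack-vars) (const_int -4))), so every
   insn that pointed at the old REG now sees the MEM; the offset shown
   here is illustrative.  */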
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     struct hash_table *ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
     rtx may_share;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
                                      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0, may_share);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                may_share);
          end_sequence ();
        }
    }
}
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries
   and X is some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx may_share;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                        may_share);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }
      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
                             may_share);

      insn = next;
    }
}
/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
     struct hash_table *ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
{
  struct insns_for_mem_entry *ime
    = (struct insns_for_mem_entry *) hash_lookup (ht, var,
                                                  /*create=*/0, /*copy=*/0);
  rtx insn_list;

  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
                           unsignedp, 1, may_share);
}
1735 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1736 the insn under examination, VAR is the variable to fix up
1737 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1738 TOPLEVEL is nonzero if this is the main insn chain for this
1739 function. */
1741 static void
1742 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
1743 rtx insn;
1744 rtx var;
1745 enum machine_mode promoted_mode;
1746 int unsignedp;
1747 int toplevel;
1748 rtx no_share;
1750 rtx call_dest = 0;
1751 rtx set, prev, prev_set;
1752 rtx note;
1754 /* Remember the notes in case we delete the insn. */
1755 note = REG_NOTES (insn);
1757 /* If this is a CLOBBER of VAR, delete it.
1759 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1760 and REG_RETVAL notes too. */
1761 if (GET_CODE (PATTERN (insn)) == CLOBBER
1762 && (XEXP (PATTERN (insn), 0) == var
1763 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1764 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1765 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1767 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1768 /* The REG_LIBCALL note will go away since we are going to
1769 turn INSN into a NOTE, so just delete the
1770 corresponding REG_RETVAL note. */
1771 remove_note (XEXP (note, 0),
1772 find_reg_note (XEXP (note, 0), REG_RETVAL,
1773 NULL_RTX));
1775 delete_insn (insn);
1778 /* The insn to load VAR from a home in the arglist
1779 is now a no-op. When we see it, just delete it.
1780 Similarly if this is storing VAR from a register from which
1781 it was loaded in the previous insn. This will occur
1782 when an ADDRESSOF was made for an arglist slot. */
1783 else if (toplevel
1784 && (set = single_set (insn)) != 0
1785 && SET_DEST (set) == var
1786 /* If this represents the result of an insn group,
1787 don't delete the insn. */
1788 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1789 && (rtx_equal_p (SET_SRC (set), var)
1790 || (GET_CODE (SET_SRC (set)) == REG
1791 && (prev = prev_nonnote_insn (insn)) != 0
1792 && (prev_set = single_set (prev)) != 0
1793 && SET_DEST (prev_set) == SET_SRC (set)
1794 && rtx_equal_p (SET_SRC (prev_set), var))))
1796 delete_insn (insn);
1798 else
1800 struct fixup_replacement *replacements = 0;
1801 rtx next_insn = NEXT_INSN (insn);
1803 if (SMALL_REGISTER_CLASSES)
1805 /* If the insn that copies the results of a CALL_INSN
1806 into a pseudo now references VAR, we have to use an
1807 intermediate pseudo since we want the life of the
1808 return value register to be only a single insn.
If we don't use an intermediate pseudo, such things as
address computations needed to make the address of VAR valid
(when it is not already) can be placed between the CALL_INSN and INSN.
1814 To make sure this doesn't happen, we record the destination
1815 of the CALL_INSN and see if the next insn uses both that
1816 and VAR. */
1818 if (call_dest != 0 && GET_CODE (insn) == INSN
1819 && reg_mentioned_p (var, PATTERN (insn))
1820 && reg_mentioned_p (call_dest, PATTERN (insn)))
1822 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1824 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1826 PATTERN (insn) = replace_rtx (PATTERN (insn),
1827 call_dest, temp);
1830 if (GET_CODE (insn) == CALL_INSN
1831 && GET_CODE (PATTERN (insn)) == SET)
1832 call_dest = SET_DEST (PATTERN (insn));
1833 else if (GET_CODE (insn) == CALL_INSN
1834 && GET_CODE (PATTERN (insn)) == PARALLEL
1835 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1836 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1837 else
1838 call_dest = 0;
1841 /* See if we have to do anything to INSN now that VAR is in
1842 memory. If it needs to be loaded into a pseudo, use a single
1843 pseudo for the entire insn in case there is a MATCH_DUP
1844 between two operands. We pass a pointer to the head of
1845 a list of struct fixup_replacements. If fixup_var_refs_1
1846 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1847 it will record them in this list.
1849 If it allocated a pseudo for any replacement, we copy into
1850 it here. */
1852 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1853 &replacements, no_share);
1855 /* If this is last_parm_insn, and any instructions were output
1856 after it to fix it up, then we must set last_parm_insn to
1857 the last such instruction emitted. */
1858 if (insn == last_parm_insn)
1859 last_parm_insn = PREV_INSN (next_insn);
1861 while (replacements)
1863 struct fixup_replacement *next;
1865 if (GET_CODE (replacements->new) == REG)
1867 rtx insert_before;
1868 rtx seq;
1870 /* OLD might be a (subreg (mem)). */
1871 if (GET_CODE (replacements->old) == SUBREG)
1872 replacements->old
1873 = fixup_memory_subreg (replacements->old, insn,
1874 promoted_mode, 0);
1875 else
1876 replacements->old
1877 = fixup_stack_1 (replacements->old, insn);
1879 insert_before = insn;
1881 /* If we are changing the mode, do a conversion.
1882 This might be wasteful, but combine.c will
1883 eliminate much of the waste. */
1885 if (GET_MODE (replacements->new)
1886 != GET_MODE (replacements->old))
1888 start_sequence ();
1889 convert_move (replacements->new,
1890 replacements->old, unsignedp);
1891 seq = gen_sequence ();
1892 end_sequence ();
1894 else
1895 seq = gen_move_insn (replacements->new,
1896 replacements->old);
1898 emit_insn_before (seq, insert_before);
1901 next = replacements->next;
1902 free (replacements);
1903 replacements = next;
1907 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1908 But don't touch other insns referred to by reg-notes;
1909 we will get them elsewhere. */
1910 while (note)
1912 if (GET_CODE (note) != INSN_LIST)
1913 XEXP (note, 0)
1914 = walk_fixup_memory_subreg (XEXP (note, 0), insn,
1915 promoted_mode, 1);
1916 note = XEXP (note, 1);
1920 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1921 See if the rtx expression at *LOC in INSN needs to be changed.
1923 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1924 contain a list of original rtx's and replacements. If we find that we need
1925 to modify this insn by replacing a memory reference with a pseudo or by
1926 making a new MEM to implement a SUBREG, we consult that list to see if
1927 we have already chosen a replacement. If none has already been allocated,
1928 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1929 or the SUBREG, as appropriate, to the pseudo. */
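/* For concreteness, an illustrative sketch (not from the original
   code): suppose VAR is now a MEM and INSN is

	(set (reg:SI 100) (plus:SI VAR VAR))

   where the matching insn pattern contains a MATCH_DUP.  Both uses
   of VAR must then be replaced by the same pseudo, say (reg:SI 101),
   or the insn will no longer match; the REPLACEMENTS list makes the
   second lookup return the pseudo chosen for the first.  */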
1931 static void
1932 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1933 rtx var;
1934 enum machine_mode promoted_mode;
1935 rtx *loc;
1936 rtx insn;
1937 struct fixup_replacement **replacements;
1938 rtx no_share;
1940 int i;
1941 rtx x = *loc;
1942 RTX_CODE code = GET_CODE (x);
1943 const char *fmt;
1944 rtx tem, tem1;
1945 struct fixup_replacement *replacement;
1947 switch (code)
1949 case ADDRESSOF:
1950 if (XEXP (x, 0) == var)
1952 /* Prevent sharing of rtl that might lose. */
1953 rtx sub = copy_rtx (XEXP (var, 0));
1955 if (! validate_change (insn, loc, sub, 0))
1957 rtx y = gen_reg_rtx (GET_MODE (sub));
1958 rtx seq, new_insn;
1960 /* We should be able to replace with a register or all is lost.
1961 Note that we can't use validate_change to verify this, since
we are not taking care to replace all duplicates simultaneously.  */
1963 if (! validate_replace_rtx (*loc, y, insn))
1964 abort ();
1966 /* Careful! First try to recognize a direct move of the
1967 value, mimicking how things are done in gen_reload wrt
1968 PLUS. Consider what happens when insn is a conditional
1969 move instruction and addsi3 clobbers flags. */
1971 start_sequence ();
1972 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1973 seq = gen_sequence ();
1974 end_sequence ();
1976 if (recog_memoized (new_insn) < 0)
1978 /* That failed. Fall back on force_operand and hope. */
1980 start_sequence ();
1981 sub = force_operand (sub, y);
1982 if (sub != y)
1983 emit_insn (gen_move_insn (y, sub));
1984 seq = gen_sequence ();
1985 end_sequence ();
1988 #ifdef HAVE_cc0
1989 /* Don't separate setter from user. */
1990 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1991 insn = PREV_INSN (insn);
1992 #endif
1994 emit_insn_before (seq, insn);
1997 return;
1999 case MEM:
2000 if (var == x)
2002 /* If we already have a replacement, use it. Otherwise,
2003 try to fix up this address in case it is invalid. */
2005 replacement = find_fixup_replacement (replacements, var);
2006 if (replacement->new)
2008 *loc = replacement->new;
2009 return;
2012 *loc = replacement->new = x = fixup_stack_1 (x, insn);
2014 /* Unless we are forcing memory to register or we changed the mode,
2015 we can leave things the way they are if the insn is valid. */
2017 INSN_CODE (insn) = -1;
2018 if (! flag_force_mem && GET_MODE (x) == promoted_mode
2019 && recog_memoized (insn) >= 0)
2020 return;
2022 *loc = replacement->new = gen_reg_rtx (promoted_mode);
2023 return;
2026 /* If X contains VAR, we need to unshare it here so that we update
2027 each occurrence separately. But all identical MEMs in one insn
2028 must be replaced with the same rtx because of the possibility of
2029 MATCH_DUPs. */
2031 if (reg_mentioned_p (var, x))
2033 replacement = find_fixup_replacement (replacements, x);
2034 if (replacement->new == 0)
2035 replacement->new = copy_most_rtx (x, no_share);
2037 *loc = x = replacement->new;
2038 code = GET_CODE (x);
2040 break;
2042 case REG:
2043 case CC0:
2044 case PC:
2045 case CONST_INT:
2046 case CONST:
2047 case SYMBOL_REF:
2048 case LABEL_REF:
2049 case CONST_DOUBLE:
2050 case CONST_VECTOR:
2051 return;
2053 case SIGN_EXTRACT:
2054 case ZERO_EXTRACT:
2055 /* Note that in some cases those types of expressions are altered
2056 by optimize_bit_field, and do not survive to get here. */
2057 if (XEXP (x, 0) == var
2058 || (GET_CODE (XEXP (x, 0)) == SUBREG
2059 && SUBREG_REG (XEXP (x, 0)) == var))
2061 /* Get TEM as a valid MEM in the mode presently in the insn.
2063 We don't worry about the possibility of MATCH_DUP here; it
2064 is highly unlikely and would be tricky to handle. */
2066 tem = XEXP (x, 0);
2067 if (GET_CODE (tem) == SUBREG)
2069 if (GET_MODE_BITSIZE (GET_MODE (tem))
2070 > GET_MODE_BITSIZE (GET_MODE (var)))
2072 replacement = find_fixup_replacement (replacements, var);
2073 if (replacement->new == 0)
2074 replacement->new = gen_reg_rtx (GET_MODE (var));
2075 SUBREG_REG (tem) = replacement->new;
2077 /* The following code works only if we have a MEM, so we
2078 need to handle the subreg here. We directly substitute
2079 it assuming that a subreg must be OK here. We already
2080 scheduled a replacement to copy the mem into the
2081 subreg. */
2082 XEXP (x, 0) = tem;
2083 return;
2085 else
2086 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2088 else
2089 tem = fixup_stack_1 (tem, insn);
2091 /* Unless we want to load from memory, get TEM into the proper mode
2092 for an extract from memory. This can only be done if the
2093 extract is at a constant position and length. */
2095 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2096 && GET_CODE (XEXP (x, 2)) == CONST_INT
2097 && ! mode_dependent_address_p (XEXP (tem, 0))
2098 && ! MEM_VOLATILE_P (tem))
2100 enum machine_mode wanted_mode = VOIDmode;
2101 enum machine_mode is_mode = GET_MODE (tem);
2102 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2104 if (GET_CODE (x) == ZERO_EXTRACT)
2106 enum machine_mode new_mode
2107 = mode_for_extraction (EP_extzv, 1);
2108 if (new_mode != MAX_MACHINE_MODE)
2109 wanted_mode = new_mode;
2111 else if (GET_CODE (x) == SIGN_EXTRACT)
2113 enum machine_mode new_mode
2114 = mode_for_extraction (EP_extv, 1);
2115 if (new_mode != MAX_MACHINE_MODE)
2116 wanted_mode = new_mode;
2119 /* If we have a narrower mode, we can do something. */
2120 if (wanted_mode != VOIDmode
2121 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2123 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2124 rtx old_pos = XEXP (x, 2);
2125 rtx newmem;
2127 /* If the bytes and bits are counted differently, we
2128 must adjust the offset. */
2129 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2130 offset = (GET_MODE_SIZE (is_mode)
2131 - GET_MODE_SIZE (wanted_mode) - offset);
2133 pos %= GET_MODE_BITSIZE (wanted_mode);
2135 newmem = adjust_address_nv (tem, wanted_mode, offset);
2137 /* Make the change and see if the insn remains valid. */
2138 INSN_CODE (insn) = -1;
2139 XEXP (x, 0) = newmem;
2140 XEXP (x, 2) = GEN_INT (pos);
2142 if (recog_memoized (insn) >= 0)
2143 return;
2145 /* Otherwise, restore old position. XEXP (x, 0) will be
2146 restored later. */
2147 XEXP (x, 2) = old_pos;
2151 /* If we get here, the bitfield extract insn can't accept a memory
2152 reference. Copy the input into a register. */
2154 tem1 = gen_reg_rtx (GET_MODE (tem));
2155 emit_insn_before (gen_move_insn (tem1, tem), insn);
2156 XEXP (x, 0) = tem1;
2157 return;
2159 break;
2161 case SUBREG:
2162 if (SUBREG_REG (x) == var)
2164 /* If this is a special SUBREG made because VAR was promoted
2165 from a wider mode, replace it with VAR and call ourself
2166 recursively, this time saying that the object previously
2167 had its current mode (by virtue of the SUBREG). */
2169 if (SUBREG_PROMOTED_VAR_P (x))
2171 *loc = var;
2172 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2173 no_share);
2174 return;
2177 /* If this SUBREG makes VAR wider, it has become a paradoxical
2178 SUBREG with VAR in memory, but these aren't allowed at this
2179 stage of the compilation. So load VAR into a pseudo and take
2180 a SUBREG of that pseudo. */
2181 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2183 replacement = find_fixup_replacement (replacements, var);
2184 if (replacement->new == 0)
2185 replacement->new = gen_reg_rtx (promoted_mode);
2186 SUBREG_REG (x) = replacement->new;
2187 return;
2190 /* See if we have already found a replacement for this SUBREG.
2191 If so, use it. Otherwise, make a MEM and see if the insn
2192 is recognized. If not, or if we should force MEM into a register,
2193 make a pseudo for this SUBREG. */
2194 replacement = find_fixup_replacement (replacements, x);
2195 if (replacement->new)
2197 *loc = replacement->new;
2198 return;
2201 replacement->new = *loc = fixup_memory_subreg (x, insn,
2202 promoted_mode, 0);
2204 INSN_CODE (insn) = -1;
2205 if (! flag_force_mem && recog_memoized (insn) >= 0)
2206 return;
2208 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2209 return;
2211 break;
2213 case SET:
2214 /* First do special simplification of bit-field references. */
2215 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2216 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2217 optimize_bit_field (x, insn, 0);
2218 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2219 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2220 optimize_bit_field (x, insn, 0);
2222 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2223 into a register and then store it back out. */
2224 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2225 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2226 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2227 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2228 > GET_MODE_SIZE (GET_MODE (var))))
2230 replacement = find_fixup_replacement (replacements, var);
2231 if (replacement->new == 0)
2232 replacement->new = gen_reg_rtx (GET_MODE (var));
2234 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2235 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2238 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2239 insn into a pseudo and store the low part of the pseudo into VAR. */
2240 if (GET_CODE (SET_DEST (x)) == SUBREG
2241 && SUBREG_REG (SET_DEST (x)) == var
2242 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2243 > GET_MODE_SIZE (GET_MODE (var))))
2245 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2246 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2247 tem)),
2248 insn);
2249 break;
2253 rtx dest = SET_DEST (x);
2254 rtx src = SET_SRC (x);
2255 rtx outerdest = dest;
2257 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2258 || GET_CODE (dest) == SIGN_EXTRACT
2259 || GET_CODE (dest) == ZERO_EXTRACT)
2260 dest = XEXP (dest, 0);
2262 if (GET_CODE (src) == SUBREG)
2263 src = SUBREG_REG (src);
2265 /* If VAR does not appear at the top level of the SET
2266 just scan the lower levels of the tree. */
2268 if (src != var && dest != var)
2269 break;
2271 /* We will need to rerecognize this insn. */
2272 INSN_CODE (insn) = -1;
2274 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2275 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2277 /* Since this case will return, ensure we fixup all the
2278 operands here. */
2279 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2280 insn, replacements, no_share);
2281 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2282 insn, replacements, no_share);
2283 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2284 insn, replacements, no_share);
2286 tem = XEXP (outerdest, 0);
2288 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2289 that may appear inside a ZERO_EXTRACT.
2290 This was legitimate when the MEM was a REG. */
2291 if (GET_CODE (tem) == SUBREG
2292 && SUBREG_REG (tem) == var)
2293 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2294 else
2295 tem = fixup_stack_1 (tem, insn);
2297 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2298 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2299 && ! mode_dependent_address_p (XEXP (tem, 0))
2300 && ! MEM_VOLATILE_P (tem))
2302 enum machine_mode wanted_mode;
2303 enum machine_mode is_mode = GET_MODE (tem);
2304 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2306 wanted_mode = mode_for_extraction (EP_insv, 0);
2308 /* If we have a narrower mode, we can do something. */
2309 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2311 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2312 rtx old_pos = XEXP (outerdest, 2);
2313 rtx newmem;
2315 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2316 offset = (GET_MODE_SIZE (is_mode)
2317 - GET_MODE_SIZE (wanted_mode) - offset);
2319 pos %= GET_MODE_BITSIZE (wanted_mode);
2321 newmem = adjust_address_nv (tem, wanted_mode, offset);
2323 /* Make the change and see if the insn remains valid. */
2324 INSN_CODE (insn) = -1;
2325 XEXP (outerdest, 0) = newmem;
2326 XEXP (outerdest, 2) = GEN_INT (pos);
2328 if (recog_memoized (insn) >= 0)
2329 return;
2331 /* Otherwise, restore old position. XEXP (x, 0) will be
2332 restored later. */
2333 XEXP (outerdest, 2) = old_pos;
2337 /* If we get here, the bit-field store doesn't allow memory
2338 or isn't located at a constant position. Load the value into
2339 a register, do the store, and put it back into memory. */
2341 tem1 = gen_reg_rtx (GET_MODE (tem));
2342 emit_insn_before (gen_move_insn (tem1, tem), insn);
2343 emit_insn_after (gen_move_insn (tem, tem1), insn);
2344 XEXP (outerdest, 0) = tem1;
2345 return;
2348 /* STRICT_LOW_PART is a no-op on memory references
2349 and it can cause combinations to be unrecognizable,
2350 so eliminate it. */
2352 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2353 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2355 /* A valid insn to copy VAR into or out of a register
2356 must be left alone, to avoid an infinite loop here.
2357 If the reference to VAR is by a subreg, fix that up,
2358 since SUBREG is not valid for a memref.
2359 Also fix up the address of the stack slot.
2361 Note that we must not try to recognize the insn until
2362 after we know that we have valid addresses and no
2363 (subreg (mem ...) ...) constructs, since these interfere
2364 with determining the validity of the insn. */
2366 if ((SET_SRC (x) == var
2367 || (GET_CODE (SET_SRC (x)) == SUBREG
2368 && SUBREG_REG (SET_SRC (x)) == var))
2369 && (GET_CODE (SET_DEST (x)) == REG
2370 || (GET_CODE (SET_DEST (x)) == SUBREG
2371 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2372 && GET_MODE (var) == promoted_mode
2373 && x == single_set (insn))
2375 rtx pat, last;
2377 if (GET_CODE (SET_SRC (x)) == SUBREG
2378 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2379 > GET_MODE_SIZE (GET_MODE (var))))
2381 /* This (subreg VAR) is now a paradoxical subreg. We need
2382 to replace VAR instead of the subreg. */
2383 replacement = find_fixup_replacement (replacements, var);
2384 if (replacement->new == NULL_RTX)
2385 replacement->new = gen_reg_rtx (GET_MODE (var));
2386 SUBREG_REG (SET_SRC (x)) = replacement->new;
2388 else
2390 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2391 if (replacement->new)
2392 SET_SRC (x) = replacement->new;
2393 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2394 SET_SRC (x) = replacement->new
= fixup_memory_subreg (SET_SRC (x), insn, promoted_mode, 0);
2397 else
2398 SET_SRC (x) = replacement->new
2399 = fixup_stack_1 (SET_SRC (x), insn);
2402 if (recog_memoized (insn) >= 0)
2403 return;
2405 /* INSN is not valid, but we know that we want to
2406 copy SET_SRC (x) to SET_DEST (x) in some way. So
2407 we generate the move and see whether it requires more
2408 than one insn. If it does, we emit those insns and
delete INSN.  Otherwise, we can just replace the pattern
of INSN; we have already verified above that INSN has
no other function than to do X.  */
2413 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2414 if (GET_CODE (pat) == SEQUENCE)
2416 last = emit_insn_before (pat, insn);
2418 /* INSN might have REG_RETVAL or other important notes, so
2419 we need to store the pattern of the last insn in the
2420 sequence into INSN similarly to the normal case. LAST
2421 should not have REG_NOTES, but we allow them if INSN has
2422 no REG_NOTES. */
2423 if (REG_NOTES (last) && REG_NOTES (insn))
2424 abort ();
2425 if (REG_NOTES (last))
2426 REG_NOTES (insn) = REG_NOTES (last);
2427 PATTERN (insn) = PATTERN (last);
2429 delete_insn (last);
2431 else
2432 PATTERN (insn) = pat;
2434 return;
2437 if ((SET_DEST (x) == var
2438 || (GET_CODE (SET_DEST (x)) == SUBREG
2439 && SUBREG_REG (SET_DEST (x)) == var))
2440 && (GET_CODE (SET_SRC (x)) == REG
2441 || (GET_CODE (SET_SRC (x)) == SUBREG
2442 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2443 && GET_MODE (var) == promoted_mode
2444 && x == single_set (insn))
2446 rtx pat, last;
2448 if (GET_CODE (SET_DEST (x)) == SUBREG)
2449 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2450 promoted_mode, 0);
2451 else
2452 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2454 if (recog_memoized (insn) >= 0)
2455 return;
2457 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2458 if (GET_CODE (pat) == SEQUENCE)
2460 last = emit_insn_before (pat, insn);
2462 /* INSN might have REG_RETVAL or other important notes, so
2463 we need to store the pattern of the last insn in the
2464 sequence into INSN similarly to the normal case. LAST
2465 should not have REG_NOTES, but we allow them if INSN has
2466 no REG_NOTES. */
2467 if (REG_NOTES (last) && REG_NOTES (insn))
2468 abort ();
2469 if (REG_NOTES (last))
2470 REG_NOTES (insn) = REG_NOTES (last);
2471 PATTERN (insn) = PATTERN (last);
2473 delete_insn (last);
2475 else
2476 PATTERN (insn) = pat;
2478 return;
2481 /* Otherwise, storing into VAR must be handled specially
2482 by storing into a temporary and copying that into VAR
2483 with a new insn after this one. Note that this case
2484 will be used when storing into a promoted scalar since
2485 the insn will now have different modes on the input
2486 and output and hence will be invalid (except for the case
2487 of setting it to a constant, which does not need any
2488 change if it is valid). We generate extra code in that case,
2489 but combine.c will eliminate it. */
2491 if (dest == var)
2493 rtx temp;
2494 rtx fixeddest = SET_DEST (x);
2495 enum machine_mode temp_mode;
/* A STRICT_LOW_PART around a MEM can be discarded.  */
2498 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2499 fixeddest = XEXP (fixeddest, 0);
2500 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2501 if (GET_CODE (fixeddest) == SUBREG)
2503 fixeddest = fixup_memory_subreg (fixeddest, insn,
2504 promoted_mode, 0);
2505 temp_mode = GET_MODE (fixeddest);
2507 else
2509 fixeddest = fixup_stack_1 (fixeddest, insn);
2510 temp_mode = promoted_mode;
2513 temp = gen_reg_rtx (temp_mode);
2515 emit_insn_after (gen_move_insn (fixeddest,
2516 gen_lowpart (GET_MODE (fixeddest),
2517 temp)),
2518 insn);
2520 SET_DEST (x) = temp;
2524 default:
2525 break;
2528 /* Nothing special about this RTX; fix its operands. */
2530 fmt = GET_RTX_FORMAT (code);
2531 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2533 if (fmt[i] == 'e')
2534 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2535 no_share);
2536 else if (fmt[i] == 'E')
2538 int j;
2539 for (j = 0; j < XVECLEN (x, i); j++)
2540 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2541 insn, replacements, no_share);
2546 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2547 The REG was placed on the stack, so X now has the form (SUBREG:m1
2548 (MEM:m2 ...)).
2550 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2551 must be emitted to compute NEWADDR, put them before INSN.
2553 UNCRITICAL nonzero means accept paradoxical subregs.
2554 This is used for subregs found inside REG_NOTES. */
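/* For instance (an illustrative sketch, assuming a little-endian
   target):

	(subreg:QI (mem:SI (reg fp)) 1)

   becomes

	(mem:QI (plus (reg fp) (const_int 1)))

   and if that displaced address were not valid on the target, the
   insns computing it would be emitted before INSN instead.  */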
2556 static rtx
2557 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2558 rtx x;
2559 rtx insn;
2560 enum machine_mode promoted_mode;
2561 int uncritical;
2563 int offset;
2564 rtx mem = SUBREG_REG (x);
2565 rtx addr = XEXP (mem, 0);
2566 enum machine_mode mode = GET_MODE (x);
2567 rtx result;
2569 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2570 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2571 abort ();
2573 offset = SUBREG_BYTE (x);
2574 if (BYTES_BIG_ENDIAN)
2575 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2576 the offset so that it points to the right location within the
2577 MEM. */
2578 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2580 if (!flag_force_addr
2581 && memory_address_p (mode, plus_constant (addr, offset)))
2582 /* Shortcut if no insns need be emitted. */
2583 return adjust_address (mem, mode, offset);
2585 start_sequence ();
2586 result = adjust_address (mem, mode, offset);
2587 emit_insn_before (gen_sequence (), insn);
2588 end_sequence ();
2589 return result;
2592 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2593 Replace subexpressions of X in place.
2594 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2595 Otherwise return X, with its contents possibly altered.
2597 INSN, PROMOTED_MODE and UNCRITICAL are as for
2598 fixup_memory_subreg. */
2600 static rtx
2601 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2602 rtx x;
2603 rtx insn;
2604 enum machine_mode promoted_mode;
2605 int uncritical;
2607 enum rtx_code code;
2608 const char *fmt;
2609 int i;
2611 if (x == 0)
2612 return 0;
2614 code = GET_CODE (x);
2616 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2617 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2619 /* Nothing special about this RTX; fix its operands. */
2621 fmt = GET_RTX_FORMAT (code);
2622 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2624 if (fmt[i] == 'e')
2625 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2626 promoted_mode, uncritical);
2627 else if (fmt[i] == 'E')
2629 int j;
2630 for (j = 0; j < XVECLEN (x, i); j++)
2631 XVECEXP (x, i, j)
2632 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2633 promoted_mode, uncritical);
2636 return x;
2639 /* For each memory ref within X, if it refers to a stack slot
2640 with an out of range displacement, put the address in a temp register
2641 (emitting new insns before INSN to load these registers)
2642 and alter the memory ref to use that register.
2643 Replace each such MEM rtx with a copy, to avoid clobberage. */
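/* For example (an illustrative sketch): on a target whose memory
   references only allow small displacements,

	(mem:SI (plus (reg fp) (const_int 100000)))

   is rewritten by first loading the whole address,

	(set (reg:SI 105) (plus (reg fp) (const_int 100000)))

   and then using a fresh (mem:SI (reg:SI 105)) in place of the
   original MEM.  */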
2645 static rtx
2646 fixup_stack_1 (x, insn)
2647 rtx x;
2648 rtx insn;
2650 int i;
2651 RTX_CODE code = GET_CODE (x);
2652 const char *fmt;
2654 if (code == MEM)
2656 rtx ad = XEXP (x, 0);
2657 /* If we have address of a stack slot but it's not valid
2658 (displacement is too large), compute the sum in a register. */
2659 if (GET_CODE (ad) == PLUS
2660 && GET_CODE (XEXP (ad, 0)) == REG
2661 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2662 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2663 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2664 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2665 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2666 #endif
2667 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2668 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2669 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2670 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2672 rtx temp, seq;
2673 if (memory_address_p (GET_MODE (x), ad))
2674 return x;
2676 start_sequence ();
2677 temp = copy_to_reg (ad);
2678 seq = gen_sequence ();
2679 end_sequence ();
2680 emit_insn_before (seq, insn);
2681 return replace_equiv_address (x, temp);
2683 return x;
2686 fmt = GET_RTX_FORMAT (code);
2687 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2689 if (fmt[i] == 'e')
2690 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2691 else if (fmt[i] == 'E')
2693 int j;
2694 for (j = 0; j < XVECLEN (x, i); j++)
2695 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2698 return x;
2701 /* Optimization: a bit-field instruction whose field
2702 happens to be a byte or halfword in memory
2703 can be changed to a move instruction.
2705 We call here when INSN is an insn to examine or store into a bit-field.
2706 BODY is the SET-rtx to be altered.
2708 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2709 (Currently this is called only from function.c, and EQUIV_MEM
2710 is always 0.) */
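/* For example (an illustrative sketch, assuming a little-endian
   target): the byte-aligned extraction

	(set (reg:SI 100)
	     (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8)))

   has the same net effect as the plain byte load

	(set (reg:SI 100) (zero_extend:SI (mem:QI (plus ADDR 1))))

   which is what the rewrite below produces, via convert_move when
   the destination mode differs from the new memory mode.  */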
2712 static void
2713 optimize_bit_field (body, insn, equiv_mem)
2714 rtx body;
2715 rtx insn;
2716 rtx *equiv_mem;
2718 rtx bitfield;
2719 int destflag;
2720 rtx seq = 0;
2721 enum machine_mode mode;
2723 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2724 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2725 bitfield = SET_DEST (body), destflag = 1;
2726 else
2727 bitfield = SET_SRC (body), destflag = 0;
2729 /* First check that the field being stored has constant size and position
2730 and is in fact a byte or halfword suitably aligned. */
2732 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2733 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2734 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2735 != BLKmode)
2736 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2738 rtx memref = 0;
2740 /* Now check that the containing word is memory, not a register,
2741 and that it is safe to change the machine mode. */
2743 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2744 memref = XEXP (bitfield, 0);
2745 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2746 && equiv_mem != 0)
2747 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2748 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2749 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2750 memref = SUBREG_REG (XEXP (bitfield, 0));
2751 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2752 && equiv_mem != 0
2753 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2754 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2756 if (memref
2757 && ! mode_dependent_address_p (XEXP (memref, 0))
2758 && ! MEM_VOLATILE_P (memref))
2760 /* Now adjust the address, first for any subreg'ing
2761 that we are now getting rid of,
2762 and then for which byte of the word is wanted. */
2764 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2765 rtx insns;
2767 /* Adjust OFFSET to count bits from low-address byte. */
2768 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2769 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2770 - offset - INTVAL (XEXP (bitfield, 1)));
2772 /* Adjust OFFSET to count bytes from low-address byte. */
2773 offset /= BITS_PER_UNIT;
2774 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2776 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2777 / UNITS_PER_WORD) * UNITS_PER_WORD;
2778 if (BYTES_BIG_ENDIAN)
2779 offset -= (MIN (UNITS_PER_WORD,
2780 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2781 - MIN (UNITS_PER_WORD,
2782 GET_MODE_SIZE (GET_MODE (memref))));
2785 start_sequence ();
2786 memref = adjust_address (memref, mode, offset);
2787 insns = get_insns ();
2788 end_sequence ();
2789 emit_insns_before (insns, insn);
2791 /* Store this memory reference where
2792 we found the bit field reference. */
2794 if (destflag)
2796 validate_change (insn, &SET_DEST (body), memref, 1);
2797 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2799 rtx src = SET_SRC (body);
2800 while (GET_CODE (src) == SUBREG
2801 && SUBREG_BYTE (src) == 0)
2802 src = SUBREG_REG (src);
2803 if (GET_MODE (src) != GET_MODE (memref))
2804 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2805 validate_change (insn, &SET_SRC (body), src, 1);
2807 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2808 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2809 /* This shouldn't happen because anything that didn't have
one of these modes should have been converted explicitly
2811 and then referenced through a subreg.
2812 This is so because the original bit-field was
2813 handled by agg_mode and so its tree structure had
2814 the same mode that memref now has. */
2815 abort ();
2817 else
2819 rtx dest = SET_DEST (body);
2821 while (GET_CODE (dest) == SUBREG
2822 && SUBREG_BYTE (dest) == 0
2823 && (GET_MODE_CLASS (GET_MODE (dest))
2824 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2825 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2826 <= UNITS_PER_WORD))
2827 dest = SUBREG_REG (dest);
2829 validate_change (insn, &SET_DEST (body), dest, 1);
2831 if (GET_MODE (dest) == GET_MODE (memref))
2832 validate_change (insn, &SET_SRC (body), memref, 1);
2833 else
2835 /* Convert the mem ref to the destination mode. */
2836 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2838 start_sequence ();
2839 convert_move (newreg, memref,
2840 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2841 seq = get_insns ();
2842 end_sequence ();
2844 validate_change (insn, &SET_SRC (body), newreg, 1);
2848 /* See if we can convert this extraction or insertion into
2849 a simple move insn. We might not be able to do so if this
2850 was, for example, part of a PARALLEL.
2852 If we succeed, write out any needed conversions. If we fail,
2853 it is hard to guess why we failed, so don't do anything
2854 special; just let the optimization be suppressed. */
2856 if (apply_change_group () && seq)
2857 emit_insns_before (seq, insn);
2862 /* These routines are responsible for converting virtual register references
2863 to the actual hard register references once RTL generation is complete.
The following five variables are used for communication between the
2866 routines. They contain the offsets of the virtual registers from their
2867 respective hard registers. */
2869 static int in_arg_offset;
2870 static int var_offset;
2871 static int dynamic_offset;
2872 static int out_arg_offset;
2873 static int cfa_offset;
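/* For example (an illustrative sketch): a reference to

	(plus:SI (reg:SI virtual-stack-vars) (const_int 8))

   is rewritten by instantiate_virtual_regs_1 as

	(plus:SI (reg:SI frame-pointer) (const_int var_offset + 8))

   where var_offset is the STARTING_FRAME_OFFSET recorded below.  */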
/* On most machines, the stack pointer register is equivalent to the bottom
2876 of the stack. */
2878 #ifndef STACK_POINTER_OFFSET
2879 #define STACK_POINTER_OFFSET 0
2880 #endif
2882 /* If not defined, pick an appropriate default for the offset of dynamically
2883 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2884 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2886 #ifndef STACK_DYNAMIC_OFFSET
2888 /* The bottom of the stack points to the actual arguments. If
2889 REG_PARM_STACK_SPACE is defined, this includes the space for the register
parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2891 stack space for register parameters is not pushed by the caller, but
2892 rather part of the fixed stack areas and hence not included in
2893 `current_function_outgoing_args_size'. Nevertheless, we must allow
2894 for it when allocating stack dynamic objects. */
2896 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2897 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2898 ((ACCUMULATE_OUTGOING_ARGS \
2899 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))
2902 #else
2903 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2904 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2905 + (STACK_POINTER_OFFSET))
2906 #endif
2907 #endif
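/* As a worked instance of the default above (illustrative numbers
   only): with ACCUMULATE_OUTGOING_ARGS nonzero, 16 bytes of outgoing
   arguments and a STACK_POINTER_OFFSET of 0, STACK_DYNAMIC_OFFSET
   yields 16, placing dynamically allocated objects just above the
   outgoing argument block.  */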
2909 /* On most machines, the CFA coincides with the first incoming parm. */
2911 #ifndef ARG_POINTER_CFA_OFFSET
2912 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2913 #endif
2915 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had its
2916 address taken. DECL is the decl or SAVE_EXPR for the object stored in the
2917 register, for later use if we do need to force REG into the stack. REG is
overwritten by the MEM as in put_reg_into_stack.  */

rtx
gen_mem_addressof (reg, decl)
2922 rtx reg;
2923 tree decl;
2925 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2926 REGNO (reg), decl);
2928 /* Calculate this before we start messing with decl's RTL. */
2929 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2931 /* If the original REG was a user-variable, then so is the REG whose
2932 address is being taken. Likewise for unchanging. */
2933 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2934 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2936 PUT_CODE (reg, MEM);
2937 MEM_ATTRS (reg) = 0;
2938 XEXP (reg, 0) = r;
2940 if (decl)
2942 tree type = TREE_TYPE (decl);
2943 enum machine_mode decl_mode
2944 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2945 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2946 : DECL_RTL_IF_SET (decl));
2948 PUT_MODE (reg, decl_mode);
2950 /* Clear DECL_RTL momentarily so functions below will work
2951 properly, then set it again. */
2952 if (DECL_P (decl) && decl_rtl == reg)
2953 SET_DECL_RTL (decl, 0);
2955 set_mem_attributes (reg, decl, 1);
2956 set_mem_alias_set (reg, set);
2958 if (DECL_P (decl) && decl_rtl == reg)
2959 SET_DECL_RTL (decl, reg);
2961 if (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0))
2962 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2964 else
2965 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2967 return reg;
2970 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2972 void
2973 flush_addressof (decl)
2974 tree decl;
2976 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2977 && DECL_RTL (decl) != 0
2978 && GET_CODE (DECL_RTL (decl)) == MEM
2979 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2980 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2981 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2984 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2986 static void
2987 put_addressof_into_stack (r, ht)
2988 rtx r;
2989 struct hash_table *ht;
2991 tree decl, type;
2992 int volatile_p, used_p;
2994 rtx reg = XEXP (r, 0);
2996 if (GET_CODE (reg) != REG)
2997 abort ();
2999 decl = ADDRESSOF_DECL (r);
3000 if (decl)
3002 type = TREE_TYPE (decl);
3003 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
3004 && TREE_THIS_VOLATILE (decl));
3005 used_p = (TREE_USED (decl)
3006 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
3008 else
3010 type = NULL_TREE;
3011 volatile_p = 0;
3012 used_p = 1;
3015 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
3016 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
3019 /* List of replacements made below in purge_addressof_1 when creating
3020 bitfield insertions. */
3021 static rtx purge_bitfield_addressof_replacements;
3023 /* List of replacements made below in purge_addressof_1 for patterns
3024 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
the whole pattern.  The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is
not enough in complex cases, e.g. when some field values can be
extracted by using a MEM with a narrower mode.  */
3029 static rtx purge_addressof_replacements;
3031 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3032 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3033 the stack. If the function returns FALSE then the replacement could not
3034 be made. */
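/* For example (an illustrative sketch): when this routine sees

	(mem:SI (addressof:SI (reg:SI 100) ...))

   and the modes agree, the whole MEM is simply replaced by
   (reg:SI 100).  If instead the ADDRESSOF escapes as a real address,
   the register is given a stack slot by put_addressof_into_stack and
   the slot's address is used.  */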
3036 static bool
3037 purge_addressof_1 (loc, insn, force, store, ht)
3038 rtx *loc;
3039 rtx insn;
3040 int force, store;
3041 struct hash_table *ht;
3043 rtx x;
3044 RTX_CODE code;
3045 int i, j;
3046 const char *fmt;
3047 bool result = true;
3049 /* Re-start here to avoid recursion in common cases. */
3050 restart:
3052 x = *loc;
3053 if (x == 0)
3054 return true;
3056 code = GET_CODE (x);
3058 /* If we don't return in any of the cases below, we will recurse inside
3059 the RTX, which will normally result in any ADDRESSOF being forced into
3060 memory. */
3061 if (code == SET)
3063 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3064 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3065 return result;
3067 else if (code == ADDRESSOF)
3069 rtx sub, insns;
3071 if (GET_CODE (XEXP (x, 0)) != MEM)
3073 put_addressof_into_stack (x, ht);
3074 return true;
3077 /* We must create a copy of the rtx because it was created by
3078 overwriting a REG rtx which is always shared. */
3079 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3080 if (validate_change (insn, loc, sub, 0)
3081 || validate_replace_rtx (x, sub, insn))
3082 return true;
3084 start_sequence ();
3085 sub = force_operand (sub, NULL_RTX);
3086 if (! validate_change (insn, loc, sub, 0)
3087 && ! validate_replace_rtx (x, sub, insn))
3088 abort ();
3090 insns = gen_sequence ();
3091 end_sequence ();
3092 emit_insn_before (insns, insn);
3093 return true;
3096 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3098 rtx sub = XEXP (XEXP (x, 0), 0);
3100 if (GET_CODE (sub) == MEM)
3101 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3102 else if (GET_CODE (sub) == REG
&& (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
  ;
3105 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3107 int size_x, size_sub;
3109 if (!insn)
3111 /* When processing REG_NOTES look at the list of
3112 replacements done on the insn to find the register that X
3113 was replaced by. */
3114 rtx tem;
3116 for (tem = purge_bitfield_addressof_replacements;
3117 tem != NULL_RTX;
3118 tem = XEXP (XEXP (tem, 1), 1))
3119 if (rtx_equal_p (x, XEXP (tem, 0)))
3121 *loc = XEXP (XEXP (tem, 1), 0);
3122 return true;
3125 /* See comment for purge_addressof_replacements. */
3126 for (tem = purge_addressof_replacements;
3127 tem != NULL_RTX;
3128 tem = XEXP (XEXP (tem, 1), 1))
3129 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3131 rtx z = XEXP (XEXP (tem, 1), 0);
3133 if (GET_MODE (x) == GET_MODE (z)
3134 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3135 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3136 abort ();
3138 /* It can happen that the note may speak of things
3139 in a wider (or just different) mode than the
3140 code did. This is especially true of
3141 REG_RETVAL. */
3143 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3144 z = SUBREG_REG (z);
3146 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3147 && (GET_MODE_SIZE (GET_MODE (x))
3148 > GET_MODE_SIZE (GET_MODE (z))))
/* This can occur as a result of invalid
3151 pointer casts, e.g. float f; ...
3152 *(long long int *)&f.
3153 ??? We could emit a warning here, but
3154 without a line number that wouldn't be
3155 very helpful. */
3156 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3158 else
3159 z = gen_lowpart (GET_MODE (x), z);
3161 *loc = z;
3162 return true;
3165 /* Sometimes we may not be able to find the replacement. For
3166 example when the original insn was a MEM in a wider mode,
3167 and the note is part of a sign extension of a narrowed
3168 version of that MEM. Gcc testcase compile/990829-1.c can
3169 generate an example of this situation. Rather than complain
3170 we return false, which will prompt our caller to remove the
3171 offending note. */
3172 return false;
3175 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3176 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3178 /* Don't even consider working with paradoxical subregs,
3179 or the moral equivalent seen here. */
3180 if (size_x <= size_sub
3181 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3183 /* Do a bitfield insertion to mirror what would happen
3184 in memory. */
3186 rtx val, seq;
3188 if (store)
3190 rtx p = PREV_INSN (insn);
3192 start_sequence ();
3193 val = gen_reg_rtx (GET_MODE (x));
3194 if (! validate_change (insn, loc, val, 0))
3196 /* Discard the current sequence and put the
3197 ADDRESSOF on stack. */
3198 end_sequence ();
3199 goto give_up;
3201 seq = gen_sequence ();
3202 end_sequence ();
3203 emit_insn_before (seq, insn);
3204 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3205 insn, ht);
3207 start_sequence ();
3208 store_bit_field (sub, size_x, 0, GET_MODE (x),
3209 val, GET_MODE_SIZE (GET_MODE (sub)));
3211 /* Make sure to unshare any shared rtl that store_bit_field
3212 might have created. */
3213 unshare_all_rtl_again (get_insns ());
3215 seq = gen_sequence ();
3216 end_sequence ();
3217 p = emit_insn_after (seq, insn);
3218 if (NEXT_INSN (insn))
3219 compute_insns_for_mem (NEXT_INSN (insn),
3220 p ? NEXT_INSN (p) : NULL_RTX,
3221 ht);
3223 else
3225 rtx p = PREV_INSN (insn);
3227 start_sequence ();
3228 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3229 GET_MODE (x), GET_MODE (x),
3230 GET_MODE_SIZE (GET_MODE (sub)));
3232 if (! validate_change (insn, loc, val, 0))
3234 /* Discard the current sequence and put the
3235 ADDRESSOF on stack. */
3236 end_sequence ();
3237 goto give_up;
3240 seq = gen_sequence ();
3241 end_sequence ();
3242 emit_insn_before (seq, insn);
3243 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3244 insn, ht);
3247 /* Remember the replacement so that the same one can be done
3248 on the REG_NOTES. */
3249 purge_bitfield_addressof_replacements
3250 = gen_rtx_EXPR_LIST (VOIDmode, x,
3251 gen_rtx_EXPR_LIST
3252 (VOIDmode, val,
3253 purge_bitfield_addressof_replacements));
3255 /* We replaced with a reg -- all done. */
3256 return true;
3260 else if (validate_change (insn, loc, sub, 0))
3262 /* Remember the replacement so that the same one can be done
3263 on the REG_NOTES. */
3264 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3266 rtx tem;
3268 for (tem = purge_addressof_replacements;
3269 tem != NULL_RTX;
3270 tem = XEXP (XEXP (tem, 1), 1))
3271 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3273 XEXP (XEXP (tem, 1), 0) = sub;
3274 return true;
3276 purge_addressof_replacements
3277 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3278 gen_rtx_EXPR_LIST (VOIDmode, sub,
3279 purge_addressof_replacements));
3280 return true;
3282 goto restart;
3286 give_up:
3287 /* Scan all subexpressions. */
3288 fmt = GET_RTX_FORMAT (code);
3289 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3291 if (*fmt == 'e')
3292 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3293 else if (*fmt == 'E')
3294 for (j = 0; j < XVECLEN (x, i); j++)
3295 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3298 return result;
3301 /* Return a new hash table entry in HT. */
3303 static struct hash_entry *
3304 insns_for_mem_newfunc (he, ht, k)
3305 struct hash_entry *he;
3306 struct hash_table *ht;
3307 hash_table_key k ATTRIBUTE_UNUSED;
3309 struct insns_for_mem_entry *ifmhe;
3310 if (he)
3311 return he;
3313 ifmhe = ((struct insns_for_mem_entry *)
3314 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3315 ifmhe->insns = NULL_RTX;
3317 return &ifmhe->he;
3320 /* Return a hash value for K, a REG. */
3322 static unsigned long
3323 insns_for_mem_hash (k)
3324 hash_table_key k;
/* K is really an RTX.  Just use its address as the hash value.  */
3327 return (unsigned long) k;
3330 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3332 static bool
3333 insns_for_mem_comp (k1, k2)
3334 hash_table_key k1;
3335 hash_table_key k2;
3337 return k1 == k2;
3340 struct insns_for_mem_walk_info
3342 /* The hash table that we are using to record which INSNs use which
3343 MEMs. */
3344 struct hash_table *ht;
3346 /* The INSN we are currently processing. */
3347 rtx insn;
3349 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3350 to find the insns that use the REGs in the ADDRESSOFs. */
3351 int pass;
3354 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3355 that might be used in an ADDRESSOF expression, record this INSN in
3356 the hash table given by DATA (which is really a pointer to an
3357 insns_for_mem_walk_info structure). */
3359 static int
3360 insns_for_mem_walk (r, data)
3361 rtx *r;
3362 void *data;
3364 struct insns_for_mem_walk_info *ifmwi
3365 = (struct insns_for_mem_walk_info *) data;
3367 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3368 && GET_CODE (XEXP (*r, 0)) == REG)
3369 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3370 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
/* Look up this REG in the hash table.  It was entered during
   pass 0, so it is not created here.  */
struct insns_for_mem_entry *ifme
  = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
                                                *r,
                                                /*create=*/0,
                                                /*copy=*/0);
3379 /* If we have not already recorded this INSN, do so now. Since
3380 we process the INSNs in order, we know that if we have
recorded it, it must be at the front of the list.  */
3382 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3383 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3384 ifme->insns);
3387 return 0;
3390 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3391 which REGs in HT. */
3393 static void
3394 compute_insns_for_mem (insns, last_insn, ht)
3395 rtx insns;
3396 rtx last_insn;
3397 struct hash_table *ht;
3399 rtx insn;
3400 struct insns_for_mem_walk_info ifmwi;
3401 ifmwi.ht = ht;
3403 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3404 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3405 if (INSN_P (insn))
3407 ifmwi.insn = insn;
3408 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3412 /* Helper function for purge_addressof called through for_each_rtx.
3413 Returns true iff the rtl is an ADDRESSOF. */
3415 static int
3416 is_addressof (rtl, data)
3417 rtx *rtl;
3418 void *data ATTRIBUTE_UNUSED;
3420 return GET_CODE (*rtl) == ADDRESSOF;
3423 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3424 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3425 stack. */
3427 void
3428 purge_addressof (insns)
3429 rtx insns;
3431 rtx insn;
3432 struct hash_table ht;
3434 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3435 requires a fixup pass over the instruction stream to correct
3436 INSNs that depended on the REG being a REG, and not a MEM. But,
3437 these fixup passes are slow. Furthermore, most MEMs are not
3438 mentioned in very many instructions. So, we speed up the process
3439 by pre-calculating which REGs occur in which INSNs; that allows
3440 us to perform the fixup passes much more quickly. */
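/* E.g. with N insns and M addressable REGs, the table turns what
   would otherwise be M full scans of the insn stream into one
   pre-pass plus, for each REG, visits to only the insns that
   actually mention it (an illustrative bound, not a measurement).  */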
3441 hash_table_init (&ht,
3442 insns_for_mem_newfunc,
3443 insns_for_mem_hash,
3444 insns_for_mem_comp);
3445 compute_insns_for_mem (insns, NULL_RTX, &ht);
3447 for (insn = insns; insn; insn = NEXT_INSN (insn))
3448 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3449 || GET_CODE (insn) == CALL_INSN)
3451 if (! purge_addressof_1 (&PATTERN (insn), insn,
3452 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3453 /* If we could not replace the ADDRESSOFs in the insn,
3454 something is wrong. */
3455 abort ();
3457 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3459 /* If we could not replace the ADDRESSOFs in the insn's notes,
3460 we can just remove the offending notes instead. */
3461 rtx note;
3463 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3465 /* If we find a REG_RETVAL note then the insn is a libcall.
3466 Such insns must have REG_EQUAL notes as well, in order
3467 for later passes of the compiler to work. So it is not
3468 safe to delete the notes here, and instead we abort. */
3469 if (REG_NOTE_KIND (note) == REG_RETVAL)
3470 abort ();
3471 if (for_each_rtx (&note, is_addressof, NULL))
3472 remove_note (insn, note);
3477 /* Clean up. */
3478 hash_table_free (&ht);
3479 purge_bitfield_addressof_replacements = 0;
3480 purge_addressof_replacements = 0;
3482 /* REGs are shared. purge_addressof will destructively replace a REG
3483 with a MEM, which creates shared MEMs.
3485 Unfortunately, the children of put_reg_into_stack assume that MEMs
3486 referring to the same stack slot are shared (fixup_var_refs and
3487 the associated hash table code).
So, we have to do another unsharing pass after we have flushed
into the stack any REGs that had their address taken.
3492 It may be worth tracking whether or not we converted any REGs into
3493 MEMs to avoid this overhead when it is not needed. */
3494 unshare_all_rtl_again (get_insns ());
3497 /* Convert a SET of a hard subreg to a set of the appropriate hard
3498 register. A subroutine of purge_hard_subreg_sets. */
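/* For example (an illustrative sketch): on a 32-bit target where
   hard register 1 immediately follows hard register 0,

	(set (subreg:SI (reg:DI 0) 4) (reg:SI 100))

   becomes

	(set (reg:SI 1) (reg:SI 100))

   assuming subreg_regno_offset maps byte offset 4 to a register
   offset of 1 on that target.  */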
3500 static void
3501 purge_single_hard_subreg_set (pattern)
3502 rtx pattern;
3504 rtx reg = SET_DEST (pattern);
3505 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3506 int offset = 0;
3508 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3509 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3511 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3512 GET_MODE (SUBREG_REG (reg)),
3513 SUBREG_BYTE (reg),
3514 GET_MODE (reg));
3515 reg = SUBREG_REG (reg);
3519 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3521 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3522 SET_DEST (pattern) = reg;
3526 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3527 only such SETs that we expect to see are those left in because
3528 integrate can't handle sets of parts of a return value register.
3530 We don't use alter_subreg because we only want to eliminate subregs
3531 of hard registers. */
3533 void
3534 purge_hard_subreg_sets (insn)
3535 rtx insn;
3537 for (; insn; insn = NEXT_INSN (insn))
3539 if (INSN_P (insn))
3541 rtx pattern = PATTERN (insn);
3542 switch (GET_CODE (pattern))
3544 case SET:
3545 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3546 purge_single_hard_subreg_set (pattern);
3547 break;
3548 case PARALLEL:
3550 int j;
3551 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3553 rtx inner_pattern = XVECEXP (pattern, 0, j);
3554 if (GET_CODE (inner_pattern) == SET
3555 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3556 purge_single_hard_subreg_set (inner_pattern);
3559 break;
3560 default:
3561 break;
3567 /* Pass through the INSNS of function FNDECL and convert virtual register
3568 references to hard register references. */
3570 void
3571 instantiate_virtual_regs (fndecl, insns)
3572 tree fndecl;
3573 rtx insns;
3575 rtx insn;
3576 unsigned int i;
3578 /* Compute the offsets to use for this function. */
3579 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3580 var_offset = STARTING_FRAME_OFFSET;
3581 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3582 out_arg_offset = STACK_POINTER_OFFSET;
3583 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3585 /* Scan all variables and parameters of this function. For each that is
3586 in memory, instantiate all virtual registers if the result is a valid
3587 address. If not, we do it later. That will handle most uses of virtual
3588 regs on many machines. */
3589 instantiate_decls (fndecl, 1);
3591 /* Initialize recognition, indicating that volatile is OK. */
3592 init_recog ();
3594 /* Scan through all the insns, instantiating every virtual register still
3595 present. */
3596 for (insn = insns; insn; insn = NEXT_INSN (insn))
3597 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3598 || GET_CODE (insn) == CALL_INSN)
3600 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3601 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3602 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3603 if (GET_CODE (insn) == CALL_INSN)
3604 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3605 NULL_RTX, 0);
3608 /* Instantiate the stack slots for the parm registers, for later use in
3609 addressof elimination. */
3610 for (i = 0; i < max_parm_reg; ++i)
3611 if (parm_reg_stack_loc[i])
3612 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3614 /* Now instantiate the remaining register equivalences for debugging info.
3615 These will not be valid addresses. */
3616 instantiate_decls (fndecl, 0);
3618 /* Indicate that, from now on, assign_stack_local should use
3619 frame_pointer_rtx. */
3620 virtuals_instantiated = 1;
3623 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3624 all virtual registers in their DECL_RTL's.
3626 If VALID_ONLY, do this only if the resulting address is still valid.
3627 Otherwise, always do it. */
3629 static void
3630 instantiate_decls (fndecl, valid_only)
3631 tree fndecl;
3632 int valid_only;
3634 tree decl;
3636 /* Process all parameters of the function. */
3637 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3639 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3640 HOST_WIDE_INT size_rtl;
3642 instantiate_decl (DECL_RTL (decl), size, valid_only);
3644 /* If the parameter was promoted, then the incoming RTL mode may be
3645 larger than the declared type size. We must use the larger of
3646 the two sizes. */
3647 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3648 size = MAX (size_rtl, size);
3649 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3652 /* Now process all variables defined in the function or its subblocks. */
3653 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3656 /* Subroutine of instantiate_decls: Process all decls in the given
3657 BLOCK node and all its subblocks. */
3659 static void
3660 instantiate_decls_1 (let, valid_only)
3661 tree let;
3662 int valid_only;
3664 tree t;
3666 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3667 if (DECL_RTL_SET_P (t))
3668 instantiate_decl (DECL_RTL (t),
3669 int_size_in_bytes (TREE_TYPE (t)),
3670 valid_only);
3672 /* Process all subblocks. */
3673 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3674 instantiate_decls_1 (t, valid_only);
3677 /* Subroutine of the preceding procedures: Given RTL representing a
3678 decl and the size of the object, do any instantiation required.
3680 If VALID_ONLY is non-zero, it means that the RTL should only be
3681 changed if the new address is valid. */
3683 static void
3684 instantiate_decl (x, size, valid_only)
3685 rtx x;
3686 HOST_WIDE_INT size;
3687 int valid_only;
3689 enum machine_mode mode;
3690 rtx addr;
3692 /* If this is not a MEM, no need to do anything. Similarly if the
3693 address is a constant or a register that is not a virtual register. */
3695 if (x == 0 || GET_CODE (x) != MEM)
3696 return;
3698 addr = XEXP (x, 0);
3699 if (CONSTANT_P (addr)
3700 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3701 || (GET_CODE (addr) == REG
3702 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3703 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3704 return;
3706 /* If we are to do this only when the address is valid, work on a copy
3707 of the address, so that we can undo any changes that might make the
3708 address invalid. This copy is unfortunate, but probably can't be
3709 avoided. */
3711 if (valid_only)
3712 addr = copy_rtx (addr);
3714 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3716 if (valid_only && size >= 0)
3718 unsigned HOST_WIDE_INT decl_size = size;
3720 /* Now verify that the resulting address is valid for every integer or
3721 floating-point mode up to and including SIZE bytes long. We do this
3722 since the object might be accessed in any mode and frame addresses
3723 are shared. */
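/* E.g., for an 8-byte decl on a typical 32-bit target, the loop
   below checks QImode, HImode, SImode and DImode addresses, and the
   MODE_FLOAT loop after it checks SFmode and DFmode (modes assumed
   for illustration).  */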
3725 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3726 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3727 mode = GET_MODE_WIDER_MODE (mode))
3728 if (! memory_address_p (mode, addr))
3729 return;
3731 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3732 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3733 mode = GET_MODE_WIDER_MODE (mode))
3734 if (! memory_address_p (mode, addr))
3735 return;
3738 /* Put back the address now that we have updated it and we either know
3739 it is valid or we don't care whether it is valid. */
3741 XEXP (x, 0) = addr;
3744 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3745 is a virtual register, return the equivalent hard register and set the
3746 offset indirectly through the pointer. Otherwise, return 0. */
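/* For instance, VIRTUAL_STACK_VARS_RTX becomes FRAME_POINTER_RTX
   with *POFFSET set to VAR_OFFSET, the STARTING_FRAME_OFFSET
   recorded earlier; the caller folds that offset into the
   containing address.  */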
3748 static rtx
3749 instantiate_new_reg (x, poffset)
3750 rtx x;
3751 HOST_WIDE_INT *poffset;
3753 rtx new;
3754 HOST_WIDE_INT offset;
3756 if (x == virtual_incoming_args_rtx)
3757 new = arg_pointer_rtx, offset = in_arg_offset;
3758 else if (x == virtual_stack_vars_rtx)
3759 new = frame_pointer_rtx, offset = var_offset;
3760 else if (x == virtual_stack_dynamic_rtx)
3761 new = stack_pointer_rtx, offset = dynamic_offset;
3762 else if (x == virtual_outgoing_args_rtx)
3763 new = stack_pointer_rtx, offset = out_arg_offset;
3764 else if (x == virtual_cfa_rtx)
3765 new = arg_pointer_rtx, offset = cfa_offset;
3766 else
3767 return 0;
3769 *poffset = offset;
3770 return new;
3773 /* Given a pointer to a piece of rtx and an optional pointer to the
3774 containing object, instantiate any virtual registers present in it.
3776 If EXTRA_INSNS, we always do the replacement and generate
3777 any extra insns before OBJECT. If it is zero, we do nothing if
3778 replacement is not valid.
3780 Return 1 if we either had nothing to do or if we were able to do the
3781 needed replacement. Return 0 otherwise; we only return zero if
3782 EXTRA_INSNS is zero.
3784 We first try some simple transformations to avoid the creation of extra
3785 pseudos. */
3787 static int
3788 instantiate_virtual_regs_1 (loc, object, extra_insns)
3789 rtx *loc;
3790 rtx object;
3791 int extra_insns;
3793 rtx x;
3794 RTX_CODE code;
3795 rtx new = 0;
3796 HOST_WIDE_INT offset = 0;
3797 rtx temp;
3798 rtx seq;
3799 int i, j;
3800 const char *fmt;
3802 /* Re-start here to avoid recursion in common cases. */
3803 restart:
3805 x = *loc;
3806 if (x == 0)
3807 return 1;
3809 code = GET_CODE (x);
3811 /* Check for some special cases. */
3812 switch (code)
3814 case CONST_INT:
3815 case CONST_DOUBLE:
3816 case CONST_VECTOR:
3817 case CONST:
3818 case SYMBOL_REF:
3819 case CODE_LABEL:
3820 case PC:
3821 case CC0:
3822 case ASM_INPUT:
3823 case ADDR_VEC:
3824 case ADDR_DIFF_VEC:
3825 case RETURN:
3826 return 1;
3828 case SET:
3829 /* We are allowed to set the virtual registers. This means that
3830 the actual register should receive the source minus the
3831 appropriate offset. This is used, for example, in the handling
3832 of non-local gotos. */
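/* E.g., (set (reg virtual-stack-vars) X) must become
   (set (reg fp) (plus X (-var_offset))), since the virtual
   register stands for fp + var_offset.  */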
3833 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3835 rtx src = SET_SRC (x);
3837 /* We are setting the register, not using it, so the relevant
3838 offset is the negative of the offset to be used were we using
3839 the register. */
3840 offset = - offset;
3841 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3843 /* The only valid sources here are PLUS or REG. Just do
3844 the simplest possible thing to handle them. */
3845 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3846 abort ();
3848 start_sequence ();
3849 if (GET_CODE (src) != REG)
3850 temp = force_operand (src, NULL_RTX);
3851 else
3852 temp = src;
3853 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3854 seq = get_insns ();
3855 end_sequence ();
3857 emit_insns_before (seq, object);
3858 SET_DEST (x) = new;
3860 if (! validate_change (object, &SET_SRC (x), temp, 0)
3861 || ! extra_insns)
3862 abort ();
3864 return 1;
3867 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3868 loc = &SET_SRC (x);
3869 goto restart;
3871 case PLUS:
3872 /* Handle special case of virtual register plus constant. */
3873 if (CONSTANT_P (XEXP (x, 1)))
3875 rtx old, new_offset;
3877 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3878 if (GET_CODE (XEXP (x, 0)) == PLUS)
3880 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3882 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3883 extra_insns);
3884 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3886 else
3888 loc = &XEXP (x, 0);
3889 goto restart;
3893 #ifdef POINTERS_EXTEND_UNSIGNED
3894 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3895 we can commute the PLUS and SUBREG because pointers into the
3896 frame are well-behaved. */
3897 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3898 && GET_CODE (XEXP (x, 1)) == CONST_INT
3899 && 0 != (new
3900 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3901 &offset))
3902 && validate_change (object, loc,
3903 plus_constant (gen_lowpart (ptr_mode,
3904 new),
3905 offset
3906 + INTVAL (XEXP (x, 1))),
3907 0))
3908 return 1;
3909 #endif
3910 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3912 /* We know the second operand is a constant. Unless the
3913 first operand is a REG (which has already been checked),
3914 it needs to be checked. */
3915 if (GET_CODE (XEXP (x, 0)) != REG)
3917 loc = &XEXP (x, 0);
3918 goto restart;
3920 return 1;
3923 new_offset = plus_constant (XEXP (x, 1), offset);
3925 /* If the new constant is zero, try to replace the sum with just
3926 the register. */
3927 if (new_offset == const0_rtx
3928 && validate_change (object, loc, new, 0))
3929 return 1;
3931 /* Next try to replace the register and new offset.
3932 There are two changes to validate here, and we can't assume that,
3933 when the old offset equals the new one, just changing the register
3934 will yield a valid insn. In the interests of a little efficiency,
3935 however, we only call validate_change once (we don't queue up the
3936 changes and then call apply_change_group). */
3938 old = XEXP (x, 0);
3939 if (offset == 0
3940 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3941 : (XEXP (x, 0) = new,
3942 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3944 if (! extra_insns)
3946 XEXP (x, 0) = old;
3947 return 0;
3950 /* Otherwise copy the new constant into a register and replace
3951 the constant with that register. */
3952 temp = gen_reg_rtx (Pmode);
3953 XEXP (x, 0) = new;
3954 if (validate_change (object, &XEXP (x, 1), temp, 0))
3955 emit_insn_before (gen_move_insn (temp, new_offset), object);
3956 else
3958 /* If that didn't work, replace this expression with a
3959 register containing the sum. */
3961 XEXP (x, 0) = old;
3962 new = gen_rtx_PLUS (Pmode, new, new_offset);
3964 start_sequence ();
3965 temp = force_operand (new, NULL_RTX);
3966 seq = get_insns ();
3967 end_sequence ();
3969 emit_insns_before (seq, object);
3970 if (! validate_change (object, loc, temp, 0)
3971 && ! validate_replace_rtx (x, temp, object))
3972 abort ();
3976 return 1;
3979 /* Fall through to generic two-operand expression case. */
3980 case EXPR_LIST:
3981 case CALL:
3982 case COMPARE:
3983 case MINUS:
3984 case MULT:
3985 case DIV: case UDIV:
3986 case MOD: case UMOD:
3987 case AND: case IOR: case XOR:
3988 case ROTATERT: case ROTATE:
3989 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3990 case NE: case EQ:
3991 case GE: case GT: case GEU: case GTU:
3992 case LE: case LT: case LEU: case LTU:
3993 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3994 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3995 loc = &XEXP (x, 0);
3996 goto restart;
3998 case MEM:
3999 /* Most cases of MEM that convert to valid addresses have already been
4000 handled by our scan of decls. The only special handling we
4001 need here is to make a copy of the rtx to ensure it isn't being
4002 shared if we have to change it to a pseudo.
4004 If the rtx is a simple reference to an address via a virtual register,
4005 it can potentially be shared. In such cases, first try to make it
4006 a valid address, which can also be shared. Otherwise, copy it and
4007 proceed normally.
4009 First check for common cases that need no processing. These are
4010 usually due to instantiation already being done on a previous instance
4011 of a shared rtx. */
4013 temp = XEXP (x, 0);
4014 if (CONSTANT_ADDRESS_P (temp)
4015 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4016 || temp == arg_pointer_rtx
4017 #endif
4018 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4019 || temp == hard_frame_pointer_rtx
4020 #endif
4021 || temp == frame_pointer_rtx)
4022 return 1;
4024 if (GET_CODE (temp) == PLUS
4025 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4026 && (XEXP (temp, 0) == frame_pointer_rtx
4027 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4028 || XEXP (temp, 0) == hard_frame_pointer_rtx
4029 #endif
4030 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4031 || XEXP (temp, 0) == arg_pointer_rtx
4032 #endif
4033 ))
4034 return 1;
4036 if (temp == virtual_stack_vars_rtx
4037 || temp == virtual_incoming_args_rtx
4038 || (GET_CODE (temp) == PLUS
4039 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4040 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4041 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4043 /* This MEM may be shared. If the substitution can be done without
4044 the need to generate new pseudos, we want to do it in place
4045 so all copies of the shared rtx benefit. The call below will
4046 only make substitutions if the resulting address is still
4047 valid.
4049 Note that we cannot pass X as the object in the recursive call
4050 since the insn being processed may not allow all valid
4051 addresses. However, if we were not passed an object, we can
4052 only modify X without copying it if X will have a valid
4053 address.
4055 ??? Also note that this can still lose if OBJECT is an insn that
4056 has fewer restrictions on an address than some other insn.
4057 In that case, we will modify the shared address. This case
4058 doesn't seem very likely, though. One case where this could
4059 happen is in the case of a USE or CLOBBER reference, but we
4060 take care of that below. */
4062 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4063 object ? object : x, 0))
4064 return 1;
4066 /* Otherwise make a copy and process that copy. We copy the entire
4067 RTL expression since it might be a PLUS which could also be
4068 shared. */
4069 *loc = x = copy_rtx (x);
4072 /* Fall through to generic unary operation case. */
4073 case PREFETCH:
4074 case SUBREG:
4075 case STRICT_LOW_PART:
4076 case NEG: case NOT:
4077 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4078 case SIGN_EXTEND: case ZERO_EXTEND:
4079 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4080 case FLOAT: case FIX:
4081 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4082 case ABS:
4083 case SQRT:
4084 case FFS:
4085 /* These cases either have just one operand or we know that we need not
4086 check the rest of the operands. */
4087 loc = &XEXP (x, 0);
4088 goto restart;
4090 case USE:
4091 case CLOBBER:
4092 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4093 go ahead and make the invalid one, but do it to a copy. For a REG,
4094 just make the recursive call, since there's no chance of a problem. */
4096 if ((GET_CODE (XEXP (x, 0)) == MEM
4097 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4098 0))
4099 || (GET_CODE (XEXP (x, 0)) == REG
4100 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4101 return 1;
4103 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4104 loc = &XEXP (x, 0);
4105 goto restart;
4107 case REG:
4108 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4109 in front of this insn and substitute the temporary. */
4110 if ((new = instantiate_new_reg (x, &offset)) != 0)
4112 temp = plus_constant (new, offset);
4113 if (!validate_change (object, loc, temp, 0))
4115 if (! extra_insns)
4116 return 0;
4118 start_sequence ();
4119 temp = force_operand (temp, NULL_RTX);
4120 seq = get_insns ();
4121 end_sequence ();
4123 emit_insns_before (seq, object);
4124 if (! validate_change (object, loc, temp, 0)
4125 && ! validate_replace_rtx (x, temp, object))
4126 abort ();
4130 return 1;
4132 case ADDRESSOF:
4133 if (GET_CODE (XEXP (x, 0)) == REG)
4134 return 1;
4136 else if (GET_CODE (XEXP (x, 0)) == MEM)
4138 /* If we have a (addressof (mem ..)), do any instantiation inside
4139 since we know we'll be making the inside valid when we finally
4140 remove the ADDRESSOF. */
4141 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4142 return 1;
4144 break;
4146 default:
4147 break;
4150 /* Scan all subexpressions. */
4151 fmt = GET_RTX_FORMAT (code);
4152 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4153 if (*fmt == 'e')
4155 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4156 return 0;
4158 else if (*fmt == 'E')
4159 for (j = 0; j < XVECLEN (x, i); j++)
4160 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4161 extra_insns))
4162 return 0;
4164 return 1;
4167 /* Optimization: assuming this function does not receive nonlocal gotos,
4168 delete the handlers for such, as well as the insns to establish
4169 and disestablish them. */
4171 static void
4172 delete_handlers ()
4174 rtx insn;
4175 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4177 /* Delete the handler by turning off the flag that would
4178 prevent jump_optimize from deleting it.
4179 Also permit deletion of the nonlocal labels themselves
4180 if nothing local refers to them. */
4181 if (GET_CODE (insn) == CODE_LABEL)
4183 tree t, last_t;
4185 LABEL_PRESERVE_P (insn) = 0;
4187 /* Remove it from the nonlocal_label list, to avoid confusing
4188 flow. */
4189 for (t = nonlocal_labels, last_t = 0; t;
4190 last_t = t, t = TREE_CHAIN (t))
4191 if (DECL_RTL (TREE_VALUE (t)) == insn)
4192 break;
4193 if (t)
4195 if (! last_t)
4196 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4197 else
4198 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4201 if (GET_CODE (insn) == INSN)
4203 int can_delete = 0;
4204 rtx t;
4205 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4206 if (reg_mentioned_p (t, PATTERN (insn)))
4208 can_delete = 1;
4209 break;
4211 if (can_delete
4212 || (nonlocal_goto_stack_level != 0
4213 && reg_mentioned_p (nonlocal_goto_stack_level,
4214 PATTERN (insn))))
4215 delete_related_insns (insn);
4220 int
4221 max_parm_reg_num ()
4223 return max_parm_reg;
4226 /* Return the first insn following those generated by `assign_parms'. */
4228 rtx
4229 get_first_nonparm_insn ()
4231 if (last_parm_insn)
4232 return NEXT_INSN (last_parm_insn);
4233 return get_insns ();
4236 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4237 Crash if there is none. */
4239 rtx
4240 get_first_block_beg ()
4242 rtx searcher;
4243 rtx insn = get_first_nonparm_insn ();
4245 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4246 if (GET_CODE (searcher) == NOTE
4247 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4248 return searcher;
4250 abort (); /* Invalid call to this function. (See comments above.) */
4251 return NULL_RTX;
4254 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4255 This means a type for which function calls must pass an address to the
4256 function or get an address back from the function.
4257 EXP may be a type node or an expression (whose type is tested). */
4259 int
4260 aggregate_value_p (exp)
4261 tree exp;
4263 int i, regno, nregs;
4264 rtx reg;
4266 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4268 if (TREE_CODE (type) == VOID_TYPE)
4269 return 0;
4270 if (RETURN_IN_MEMORY (type))
4271 return 1;
4272 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4273 and thus can't be returned in registers. */
4274 if (TREE_ADDRESSABLE (type))
4275 return 1;
4276 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4277 return 1;
4278 /* Make sure we have suitable call-clobbered regs to return
4279 the value in; if not, we must return it in memory. */
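/* E.g., if any register of the would-be return location is
   call-saved rather than call-clobbered, the loop over
   call_used_regs below forces a memory return.  */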
4280 reg = hard_function_value (type, 0, 0);
4282 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4283 it is OK. */
4284 if (GET_CODE (reg) != REG)
4285 return 0;
4287 regno = REGNO (reg);
4288 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4289 for (i = 0; i < nregs; i++)
4290 if (! call_used_regs[regno + i])
4291 return 1;
4292 return 0;
4295 /* Assign RTL expressions to the function's parameters.
4296 This may involve copying them into registers and using
4297 those registers as the RTL for them. */
4299 void
4300 assign_parms (fndecl)
4301 tree fndecl;
4303 tree parm;
4304 rtx entry_parm = 0;
4305 rtx stack_parm = 0;
4306 CUMULATIVE_ARGS args_so_far;
4307 enum machine_mode promoted_mode, passed_mode;
4308 enum machine_mode nominal_mode, promoted_nominal_mode;
4309 int unsignedp;
4310 /* Total space needed so far for args on the stack,
4311 given as a constant and a tree-expression. */
4312 struct args_size stack_args_size;
4313 tree fntype = TREE_TYPE (fndecl);
4314 tree fnargs = DECL_ARGUMENTS (fndecl);
4315 /* This is used for the arg pointer when referring to stack args. */
4316 rtx internal_arg_pointer;
4317 /* This is a dummy PARM_DECL that we used for the function result if
4318 the function returns a structure. */
4319 tree function_result_decl = 0;
4320 #ifdef SETUP_INCOMING_VARARGS
4321 int varargs_setup = 0;
4322 #endif
4323 rtx conversion_insns = 0;
4324 struct args_size alignment_pad;
4326 /* Nonzero if the last arg is named `__builtin_va_alist',
4327 which is used on some machines for old-fashioned non-ANSI varargs.h;
4328 this should be stuck onto the stack as if it had arrived there. */
4329 int hide_last_arg
4330 = (current_function_varargs
4331 && fnargs
4332 && (parm = tree_last (fnargs)) != 0
4333 && DECL_NAME (parm)
4334 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4335 "__builtin_va_alist")));
4337 /* Nonzero if function takes extra anonymous args.
4338 This means the last named arg must be on the stack
4339 right before the anonymous ones. */
4340 int stdarg
4341 = (TYPE_ARG_TYPES (fntype) != 0
4342 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4343 != void_type_node));
4345 current_function_stdarg = stdarg;
4347 /* If the reg that the virtual arg pointer will be translated into is
4348 not a fixed reg or is the stack pointer, make a copy of the virtual
4349 arg pointer, and address parms via the copy. The frame pointer is
4350 considered fixed even though it is not marked as such.
4352 The second time through, simply use ap to avoid generating rtx. */
4354 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4355 || ! (fixed_regs[ARG_POINTER_REGNUM]
4356 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4357 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4358 else
4359 internal_arg_pointer = virtual_incoming_args_rtx;
4360 current_function_internal_arg_pointer = internal_arg_pointer;
4362 stack_args_size.constant = 0;
4363 stack_args_size.var = 0;
4365 /* If struct value address is treated as the first argument, make it so. */
4366 if (aggregate_value_p (DECL_RESULT (fndecl))
4367 && ! current_function_returns_pcc_struct
4368 && struct_value_incoming_rtx == 0)
4370 tree type = build_pointer_type (TREE_TYPE (fntype));
4372 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4374 DECL_ARG_TYPE (function_result_decl) = type;
4375 TREE_CHAIN (function_result_decl) = fnargs;
4376 fnargs = function_result_decl;
4379 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4380 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4382 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4383 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4384 #else
4385 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4386 #endif
4388 /* We haven't yet found an argument that we must push and pretend the
4389 caller did. */
4390 current_function_pretend_args_size = 0;
4392 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4394 struct args_size stack_offset;
4395 struct args_size arg_size;
4396 int passed_pointer = 0;
4397 int did_conversion = 0;
4398 tree passed_type = DECL_ARG_TYPE (parm);
4399 tree nominal_type = TREE_TYPE (parm);
4400 int pretend_named;
4401 int last_named = 0, named_arg;
4403 /* Set LAST_NAMED if this is the last named arg before the
4404 anonymous args. */
4405 if (stdarg || current_function_varargs)
4407 tree tem;
4409 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4410 if (DECL_NAME (tem))
4411 break;
4413 if (tem == 0)
4414 last_named = 1;
4416 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4417 most machines, if this is a varargs/stdarg function, then we treat
4418 the last named arg as if it were anonymous too. */
4419 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4421 if (TREE_TYPE (parm) == error_mark_node
4422 /* This can happen after weird syntax errors
4423 or if an enum type is defined among the parms. */
4424 || TREE_CODE (parm) != PARM_DECL
4425 || passed_type == NULL)
4427 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4428 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4429 TREE_USED (parm) = 1;
4430 continue;
4433 /* For a varargs.h function, save info about regs and stack space
4434 used by the individual args, not including the va_alist arg. */
4435 if (hide_last_arg && last_named)
4436 current_function_args_info = args_so_far;
4438 /* Find mode of arg as it is passed, and mode of arg
4439 as it should be during execution of this function. */
4440 passed_mode = TYPE_MODE (passed_type);
4441 nominal_mode = TYPE_MODE (nominal_type);
4443 /* If the parm's mode is VOID, its value doesn't matter;
4444 avoid the usual things like emit_move_insn that could crash. */
4445 if (nominal_mode == VOIDmode)
4447 SET_DECL_RTL (parm, const0_rtx);
4448 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4449 continue;
4452 /* If the parm is to be passed as a transparent union, use the
4453 type of the first field for the tests below. We have already
4454 verified that the modes are the same. */
4455 if (DECL_TRANSPARENT_UNION (parm)
4456 || (TREE_CODE (passed_type) == UNION_TYPE
4457 && TYPE_TRANSPARENT_UNION (passed_type)))
4458 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4460 /* See if this arg was passed by invisible reference. It is if
4461 it is an object whose size depends on the contents of the
4462 object itself or if the machine requires these objects be passed
4463 that way. */
4465 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4466 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4467 || TREE_ADDRESSABLE (passed_type)
4468 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4469 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4470 passed_type, named_arg)
4471 #endif
4474 passed_type = nominal_type = build_pointer_type (passed_type);
4475 passed_pointer = 1;
4476 passed_mode = nominal_mode = Pmode;
4479 promoted_mode = passed_mode;
4481 #ifdef PROMOTE_FUNCTION_ARGS
4482 /* Compute the mode to which the arg is actually extended. */
4483 unsignedp = TREE_UNSIGNED (passed_type);
4484 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4485 #endif
4487 /* Let machine desc say which reg (if any) the parm arrives in.
4488 0 means it arrives on the stack. */
4489 #ifdef FUNCTION_INCOMING_ARG
4490 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4491 passed_type, named_arg);
4492 #else
4493 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4494 passed_type, named_arg);
4495 #endif
4497 if (entry_parm == 0)
4498 promoted_mode = passed_mode;
4500 #ifdef SETUP_INCOMING_VARARGS
4501 /* If this is the last named parameter, do any required setup for
4502 varargs or stdargs. We need to know about the case of this being an
4503 addressable type, in which case we skip the registers it
4504 would have arrived in.
4506 For stdargs, LAST_NAMED will be set for two parameters, the one that
4507 is actually the last named, and the dummy parameter. We only
4508 want to do this action once.
4510 Also, indicate when RTL generation is to be suppressed. */
4511 if (last_named && !varargs_setup)
4513 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4514 current_function_pretend_args_size, 0);
4515 varargs_setup = 1;
4517 #endif
4519 /* Determine parm's home in the stack,
4520 in case it arrives in the stack or we should pretend it did.
4522 Compute the stack position and rtx where the argument arrives
4523 and its size.
4525 There is one complexity here: If this was a parameter that would
4526 have been passed in registers, but wasn't only because it is
4527 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4528 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4529 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4530 0 as it was the previous time. */
4532 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4533 locate_and_pad_parm (promoted_mode, passed_type,
4534 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4535 1,
4536 #else
4537 #ifdef FUNCTION_INCOMING_ARG
4538 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4539 passed_type,
4540 pretend_named) != 0,
4541 #else
4542 FUNCTION_ARG (args_so_far, promoted_mode,
4543 passed_type,
4544 pretend_named) != 0,
4545 #endif
4546 #endif
4547 fndecl, &stack_args_size, &stack_offset, &arg_size,
4548 &alignment_pad);
4551 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4553 if (offset_rtx == const0_rtx)
4554 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4555 else
4556 stack_parm = gen_rtx_MEM (promoted_mode,
4557 gen_rtx_PLUS (Pmode,
4558 internal_arg_pointer,
4559 offset_rtx));
4561 set_mem_attributes (stack_parm, parm, 1);
4564 /* If this parameter was passed both in registers and in the stack,
4565 use the copy on the stack. */
4566 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4567 entry_parm = 0;
4569 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4570 /* If this parm was passed part in regs and part in memory,
4571 pretend it arrived entirely in memory
4572 by pushing the register-part onto the stack.
4574 In the special case of a DImode or DFmode that is split,
4575 we could put it together in a pseudoreg directly,
4576 but for now that's not worth bothering with. */
4578 if (entry_parm)
4580 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4581 passed_type, named_arg);
4583 if (nregs > 0)
4585 current_function_pretend_args_size
4586 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4587 / (PARM_BOUNDARY / BITS_PER_UNIT)
4588 * (PARM_BOUNDARY / BITS_PER_UNIT));
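/* The expression above rounds NREGS * UNITS_PER_WORD up to a
   multiple of PARM_BOUNDARY bytes: e.g., 3 words of 4 bytes with a
   64-bit PARM_BOUNDARY yields 16, not 12 (figures for illustration
   only).  */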
4590 /* Handle calls that pass values in multiple non-contiguous
4591 locations. The Irix 6 ABI has examples of this. */
4592 if (GET_CODE (entry_parm) == PARALLEL)
4593 emit_group_store (validize_mem (stack_parm), entry_parm,
4594 int_size_in_bytes (TREE_TYPE (parm)));
4596 else
4597 move_block_from_reg (REGNO (entry_parm),
4598 validize_mem (stack_parm), nregs,
4599 int_size_in_bytes (TREE_TYPE (parm)));
4601 entry_parm = stack_parm;
4604 #endif
4606 /* If we didn't decide this parm came in a register,
4607 by default it came on the stack. */
4608 if (entry_parm == 0)
4609 entry_parm = stack_parm;
4611 /* Record permanently how this parm was passed. */
4612 DECL_INCOMING_RTL (parm) = entry_parm;
4614 /* If there is actually space on the stack for this parm,
4615 count it in stack_args_size; otherwise set stack_parm to 0
4616 to indicate there is no preallocated stack slot for the parm. */
4618 if (entry_parm == stack_parm
4619 || (GET_CODE (entry_parm) == PARALLEL
4620 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4621 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4622 /* On some machines, even if a parm value arrives in a register
4623 there is still an (uninitialized) stack slot allocated for it.
4625 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4626 whether this parameter already has a stack slot allocated,
4627 because an arg block exists only if current_function_args_size
4628 is larger than some threshold, and we haven't calculated that
4629 yet. So, for now, we just assume that stack slots never exist
4630 in this case. */
4631 || REG_PARM_STACK_SPACE (fndecl) > 0
4632 #endif
4635 stack_args_size.constant += arg_size.constant;
4636 if (arg_size.var)
4637 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4639 else
4640 /* No stack slot was pushed for this parm. */
4641 stack_parm = 0;
4643 /* Update info on where next arg arrives in registers. */
4645 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4646 passed_type, named_arg);
4648 /* If we can't trust the parm stack slot to be aligned enough
4649 for its ultimate type, don't use that slot after entry.
4650 We'll make another stack slot, if we need one. */
4652 unsigned int thisparm_boundary
4653 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4655 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4656 stack_parm = 0;
4659 /* If parm was passed in memory, and we need to convert it on entry,
4660 don't store it back in that same slot. */
4661 if (entry_parm != 0
4662 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4663 stack_parm = 0;
4665 /* When an argument is passed in multiple locations, we can't
4666 make use of this information, but we can save some copying if
4667 the whole argument is passed in a single register. */
4668 if (GET_CODE (entry_parm) == PARALLEL
4669 && nominal_mode != BLKmode && passed_mode != BLKmode)
4671 int i, len = XVECLEN (entry_parm, 0);
4673 for (i = 0; i < len; i++)
4674 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4675 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4676 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4677 == passed_mode)
4678 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4680 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4681 DECL_INCOMING_RTL (parm) = entry_parm;
4682 break;
4686 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4687 in the mode in which it arrives.
4688 STACK_PARM is an RTX for a stack slot where the parameter can live
4689 during the function (in case we want to put it there).
4690 STACK_PARM is 0 if no stack slot was pushed for it.
4692 Now output code if necessary to convert ENTRY_PARM to
4693 the type in which this function declares it,
4694 and store that result in an appropriate place,
4695 which may be a pseudo reg, may be STACK_PARM,
4696 or may be a local stack slot if STACK_PARM is 0.
4698 Set DECL_RTL to that place. */
4700 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4702 /* If a BLKmode arrives in registers, copy it to a stack slot.
4703 Handle calls that pass values in multiple non-contiguous
4704 locations. The Irix 6 ABI has examples of this. */
4705 if (GET_CODE (entry_parm) == REG
4706 || GET_CODE (entry_parm) == PARALLEL)
4708 int size_stored
4709 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4710 UNITS_PER_WORD);
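/* E.g., a 10-byte BLKmode parm with 4-byte words is stored in
   CEIL_ROUND (10, 4) == 12 bytes, i.e. three whole words (sizes
   assumed for illustration).  */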
4712 /* Note that we will be storing an integral number of words.
4713 So we have to be careful to ensure that we allocate an
4714 integral number of words. We do this below in the
4715 assign_stack_local if space was not allocated in the argument
4716 list. If it was, this will not work if PARM_BOUNDARY is not
4717 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4718 if it becomes a problem. */
4720 if (stack_parm == 0)
4722 stack_parm
4723 = assign_stack_local (GET_MODE (entry_parm),
4724 size_stored, 0);
4725 set_mem_attributes (stack_parm, parm, 1);
4728 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4729 abort ();
4731 /* Handle calls that pass values in multiple non-contiguous
4732 locations. The Irix 6 ABI has examples of this. */
4733 if (GET_CODE (entry_parm) == PARALLEL)
4734 emit_group_store (validize_mem (stack_parm), entry_parm,
4735 int_size_in_bytes (TREE_TYPE (parm)));
4736 else
4737 move_block_from_reg (REGNO (entry_parm),
4738 validize_mem (stack_parm),
4739 size_stored / UNITS_PER_WORD,
4740 int_size_in_bytes (TREE_TYPE (parm)));
4742 SET_DECL_RTL (parm, stack_parm);
4744 else if (! ((! optimize
4745 && ! DECL_REGISTER (parm))
4746 || TREE_SIDE_EFFECTS (parm)
4747 /* If -ffloat-store specified, don't put explicit
4748 float variables into registers. */
4749 || (flag_float_store
4750 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4751 /* Always assign pseudo to structure return or item passed
4752 by invisible reference. */
4753 || passed_pointer || parm == function_result_decl)
4755 /* Store the parm in a pseudoregister during the function, but we
4756 may need to do it in a wider mode. */
4758 rtx parmreg;
4759 unsigned int regno, regnoi = 0, regnor = 0;
4761 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4763 promoted_nominal_mode
4764 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4766 parmreg = gen_reg_rtx (promoted_nominal_mode);
4767 mark_user_reg (parmreg);
4769 /* If this was an item that we received a pointer to, set DECL_RTL
4770 appropriately. */
4771 if (passed_pointer)
4773 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4774 parmreg);
4775 set_mem_attributes (x, parm, 1);
4776 SET_DECL_RTL (parm, x);
4778 else
4780 SET_DECL_RTL (parm, parmreg);
4781 maybe_set_unchanging (DECL_RTL (parm), parm);
4784 /* Copy the value into the register. */
4785 if (nominal_mode != passed_mode
4786 || promoted_nominal_mode != promoted_mode)
4788 int save_tree_used;
4789 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4790 mode, by the caller. We now have to convert it to
4791 NOMINAL_MODE, if different. However, PARMREG may be in
4792 a different mode than NOMINAL_MODE if it is being stored
4793 promoted.
4795 If ENTRY_PARM is a hard register, it might be in a register
4796 not valid for operating in its mode (e.g., an odd-numbered
4797 register for a DFmode). In that case, moves are the only
4798 thing valid, so we can't do a convert from there. This
4799 occurs when the calling sequence allows such misaligned
4800 usages.
4802 In addition, the conversion may involve a call, which could
4803 clobber parameters which haven't been copied to pseudo
4804 registers yet. Therefore, we must first copy the parm to
4805 a pseudo reg here, and save the conversion until after all
4806 parameters have been moved. */
4808 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4810 emit_move_insn (tempreg, validize_mem (entry_parm));
4812 push_to_sequence (conversion_insns);
4813 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4815 if (GET_CODE (tempreg) == SUBREG
4816 && GET_MODE (tempreg) == nominal_mode
4817 && GET_CODE (SUBREG_REG (tempreg)) == REG
4818 && nominal_mode == passed_mode
4819 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4820 && GET_MODE_SIZE (GET_MODE (tempreg))
4821 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4823 /* The argument is already sign/zero extended, so record
4824 that fact in the subreg. */
4825 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4826 SUBREG_PROMOTED_UNSIGNED_P (tempreg) = unsignedp;
4829 /* TREE_USED gets set erroneously during expand_assignment. */
4830 save_tree_used = TREE_USED (parm);
4831 expand_assignment (parm,
4832 make_tree (nominal_type, tempreg), 0, 0);
4833 TREE_USED (parm) = save_tree_used;
4834 conversion_insns = get_insns ();
4835 did_conversion = 1;
4836 end_sequence ();
4838 else
4839 emit_move_insn (parmreg, validize_mem (entry_parm));
4841 /* If we were passed a pointer but the actual value
4842 can safely live in a register, put it in one. */
4843 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4844 /* If a by-reference argument was promoted, demote it. */
4845 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4846 || ! ((! optimize
4847 && ! DECL_REGISTER (parm))
4848 || TREE_SIDE_EFFECTS (parm)
4849 /* If -ffloat-store specified, don't put explicit
4850 float variables into registers. */
4851 || (flag_float_store
4852 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4854 /* We can't use nominal_mode, because it will have been set to
4855 Pmode above. We must use the actual mode of the parm. */
4856 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4857 mark_user_reg (parmreg);
4858 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4860 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4861 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4862 push_to_sequence (conversion_insns);
4863 emit_move_insn (tempreg, DECL_RTL (parm));
4864 SET_DECL_RTL (parm,
4865 convert_to_mode (GET_MODE (parmreg),
4866 tempreg,
4867 unsigned_p));
4868 emit_move_insn (parmreg, DECL_RTL (parm));
4869 conversion_insns = get_insns();
4870 did_conversion = 1;
4871 end_sequence ();
4873 else
4874 emit_move_insn (parmreg, DECL_RTL (parm));
4875 SET_DECL_RTL (parm, parmreg);
4876 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4877 now the parm. */
4878 stack_parm = 0;
4880 #ifdef FUNCTION_ARG_CALLEE_COPIES
4881 /* If we are passed an arg by reference and it is our responsibility
4882 to make a copy, do it now.
4883 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4884 original argument, so we must recreate them in the call to
4885 FUNCTION_ARG_CALLEE_COPIES. */
4886 /* ??? Later add code to handle the case that if the argument isn't
4887 modified, don't do the copy. */
4889 else if (passed_pointer
4890 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4891 TYPE_MODE (DECL_ARG_TYPE (parm)),
4892 DECL_ARG_TYPE (parm),
4893 named_arg)
4894 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4896 rtx copy;
4897 tree type = DECL_ARG_TYPE (parm);
4899 /* This sequence may involve a library call perhaps clobbering
4900 registers that haven't been copied to pseudos yet. */
4902 push_to_sequence (conversion_insns);
4904 if (!COMPLETE_TYPE_P (type)
4905 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4906 /* This is a variable sized object. */
4907 copy = gen_rtx_MEM (BLKmode,
4908 allocate_dynamic_stack_space
4909 (expr_size (parm), NULL_RTX,
4910 TYPE_ALIGN (type)));
4911 else
4912 copy = assign_stack_temp (TYPE_MODE (type),
4913 int_size_in_bytes (type), 1);
4914 set_mem_attributes (copy, parm, 1);
4916 store_expr (parm, copy, 0);
4917 emit_move_insn (parmreg, XEXP (copy, 0));
4918 conversion_insns = get_insns ();
4919 did_conversion = 1;
4920 end_sequence ();
4922 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4924 /* In any case, record the parm's desired stack location
4925 in case we later discover it must live in the stack.
4927 If it is a COMPLEX value, store the stack location for both
4928 halves. */
4930 if (GET_CODE (parmreg) == CONCAT)
4931 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4932 else
4933 regno = REGNO (parmreg);
4935 if (regno >= max_parm_reg)
4937 rtx *new;
4938 int old_max_parm_reg = max_parm_reg;
4940 /* It's slow to expand this one register at a time,
4941 but it's also rare and we need max_parm_reg to be
4942 precisely correct. */
4943 max_parm_reg = regno + 1;
4944 new = (rtx *) xrealloc (parm_reg_stack_loc,
4945 max_parm_reg * sizeof (rtx));
4946 memset ((char *) (new + old_max_parm_reg), 0,
4947 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4948 parm_reg_stack_loc = new;
4951 if (GET_CODE (parmreg) == CONCAT)
4953 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4955 regnor = REGNO (gen_realpart (submode, parmreg));
4956 regnoi = REGNO (gen_imagpart (submode, parmreg));
4958 if (stack_parm != 0)
4960 parm_reg_stack_loc[regnor]
4961 = gen_realpart (submode, stack_parm);
4962 parm_reg_stack_loc[regnoi]
4963 = gen_imagpart (submode, stack_parm);
4965 else
4967 parm_reg_stack_loc[regnor] = 0;
4968 parm_reg_stack_loc[regnoi] = 0;
4971 else
4972 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4974 /* Mark the register as eliminable if we did no conversion
4975 and it was copied from memory at a fixed offset,
4976 and the arg pointer was not copied to a pseudo-reg.
4977 If the arg pointer is a pseudo reg or the offset formed
4978 an invalid address, such memory-equivalences
4979 as we make here would screw up life analysis for it. */
4980 if (nominal_mode == passed_mode
4981 && ! did_conversion
4982 && stack_parm != 0
4983 && GET_CODE (stack_parm) == MEM
4984 && stack_offset.var == 0
4985 && reg_mentioned_p (virtual_incoming_args_rtx,
4986 XEXP (stack_parm, 0)))
4988 rtx linsn = get_last_insn ();
4989 rtx sinsn, set;
4991 /* Mark complex types separately. */
4992 if (GET_CODE (parmreg) == CONCAT)
4993 /* Scan backwards for the set of the real and
4994 imaginary parts. */
4995 for (sinsn = linsn; sinsn != 0;
4996 sinsn = prev_nonnote_insn (sinsn))
4998 set = single_set (sinsn);
4999 if (set != 0
5000 && SET_DEST (set) == regno_reg_rtx [regnoi])
5001 REG_NOTES (sinsn)
5002 = gen_rtx_EXPR_LIST (REG_EQUIV,
5003 parm_reg_stack_loc[regnoi],
5004 REG_NOTES (sinsn));
5005 else if (set != 0
5006 && SET_DEST (set) == regno_reg_rtx [regnor])
5007 REG_NOTES (sinsn)
5008 = gen_rtx_EXPR_LIST (REG_EQUIV,
5009 parm_reg_stack_loc[regnor],
5010 REG_NOTES (sinsn));
5012 else if ((set = single_set (linsn)) != 0
5013 && SET_DEST (set) == parmreg)
5014 REG_NOTES (linsn)
5015 = gen_rtx_EXPR_LIST (REG_EQUIV,
5016 stack_parm, REG_NOTES (linsn));
5019 /* For pointer data type, suggest pointer register. */
5020 if (POINTER_TYPE_P (TREE_TYPE (parm)))
5021 mark_reg_pointer (parmreg,
5022 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5024 /* If something wants our address, try to use ADDRESSOF. */
5025 if (TREE_ADDRESSABLE (parm))
5027 /* If we end up putting something into the stack,
5028 fixup_var_refs_insns will need to make a pass over
5029 all the instructions. It looks through the pending
5030 sequences -- but it can't see the ones in the
5031 CONVERSION_INSNS, if they're not on the sequence
5032 stack. So, we go back to that sequence, just so that
5033 the fixups will happen. */
5034 push_to_sequence (conversion_insns);
5035 put_var_into_stack (parm);
5036 conversion_insns = get_insns ();
5037 end_sequence ();
5040 else
5042 /* Value must be stored in the stack slot STACK_PARM
5043 during function execution. */
5045 if (promoted_mode != nominal_mode)
5047 /* Conversion is required. */
5048 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5050 emit_move_insn (tempreg, validize_mem (entry_parm));
5052 push_to_sequence (conversion_insns);
5053 entry_parm = convert_to_mode (nominal_mode, tempreg,
5054 TREE_UNSIGNED (TREE_TYPE (parm)));
5055 if (stack_parm)
5056 /* ??? This may need a big-endian conversion on sparc64. */
5057 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5059 conversion_insns = get_insns ();
5060 did_conversion = 1;
5061 end_sequence ();
5064 if (entry_parm != stack_parm)
5066 if (stack_parm == 0)
5068 stack_parm
5069 = assign_stack_local (GET_MODE (entry_parm),
5070 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5071 set_mem_attributes (stack_parm, parm, 1);
5074 if (promoted_mode != nominal_mode)
5076 push_to_sequence (conversion_insns);
5077 emit_move_insn (validize_mem (stack_parm),
5078 validize_mem (entry_parm));
5079 conversion_insns = get_insns ();
5080 end_sequence ();
5082 else
5083 emit_move_insn (validize_mem (stack_parm),
5084 validize_mem (entry_parm));
5087 SET_DECL_RTL (parm, stack_parm);
5090 /* If this "parameter" was the place where we are receiving the
5091 function's incoming structure pointer, set up the result. */
5092 if (parm == function_result_decl)
5094 tree result = DECL_RESULT (fndecl);
5095 rtx addr = DECL_RTL (parm);
5096 rtx x;
5098 #ifdef POINTERS_EXTEND_UNSIGNED
5099 if (GET_MODE (addr) != Pmode)
5100 addr = convert_memory_address (Pmode, addr);
5101 #endif
5103 x = gen_rtx_MEM (DECL_MODE (result), addr);
5104 set_mem_attributes (x, result, 1);
5105 SET_DECL_RTL (result, x);
5108 if (GET_CODE (DECL_RTL (parm)) == REG)
5109 REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5110 else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5112 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5113 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5118 /* Output all parameter conversion instructions (possibly including calls)
5119 now that all parameters have been copied out of hard registers. */
5120 emit_insns (conversion_insns);
5122 last_parm_insn = get_last_insn ();
5124 current_function_args_size = stack_args_size.constant;
5126 /* Adjust function incoming argument size for alignment and
5127 minimum length. */
5129 #ifdef REG_PARM_STACK_SPACE
5130 #ifndef MAYBE_REG_PARM_STACK_SPACE
5131 current_function_args_size = MAX (current_function_args_size,
5132 REG_PARM_STACK_SPACE (fndecl));
5133 #endif
5134 #endif
5136 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5138 current_function_args_size
5139 = ((current_function_args_size + STACK_BYTES - 1)
5140 / STACK_BYTES) * STACK_BYTES;
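/* E.g., with a 64-bit STACK_BOUNDARY (STACK_BYTES == 8), a 20-byte
   argument block is rounded up to 24 (figures for illustration
   only).  */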
5142 #ifdef ARGS_GROW_DOWNWARD
5143 current_function_arg_offset_rtx
5144 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5145 : expand_expr (size_diffop (stack_args_size.var,
5146 size_int (-stack_args_size.constant)),
5147 NULL_RTX, VOIDmode, 0));
5148 #else
5149 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5150 #endif
5152 /* See how many bytes, if any, of its args a function should try to pop
5153 on return. */
5155 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5156 current_function_args_size);
5158 /* For a stdarg.h function, save info about
5159 regs and stack space used by the named args. */
5161 if (!hide_last_arg)
5162 current_function_args_info = args_so_far;
5164 /* Set the rtx used for the function return value. Put this in its
5165 own variable so any optimizers that need this information don't have
5166 to include tree.h. Do this here so it gets done when an inlined
5167 function gets output. */
5169 current_function_return_rtx
5170 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5171 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5173 /* If scalar return value was computed in a pseudo-reg, or was a named
5174 return value that got dumped to the stack, copy that to the hard
5175 return register. */
5176 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5178 tree decl_result = DECL_RESULT (fndecl);
5179 rtx decl_rtl = DECL_RTL (decl_result);
5181 if (REG_P (decl_rtl)
5182 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5183 : DECL_REGISTER (decl_result))
5185 rtx real_decl_rtl;
5187 #ifdef FUNCTION_OUTGOING_VALUE
5188 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5189 fndecl);
5190 #else
5191 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5192 fndecl);
5193 #endif
5194 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5195 /* The delay slot scheduler assumes that current_function_return_rtx
5196 holds the hard register containing the return value, not a
5197 temporary pseudo. */
5198 current_function_return_rtx = real_decl_rtl;
5203 /* Indicate whether REGNO is an incoming argument to the current function
5204 that was promoted to a wider mode. If so, return the RTX for the
5205 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5206 that REGNO is promoted from and whether the promotion was signed or
5207 unsigned. */
5209 #ifdef PROMOTE_FUNCTION_ARGS
5211 rtx
5212 promoted_input_arg (regno, pmode, punsignedp)
5213 unsigned int regno;
5214 enum machine_mode *pmode;
5215 int *punsignedp;
5217 tree arg;
5219 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5220 arg = TREE_CHAIN (arg))
5221 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5222 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5223 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5225 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5226 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5228 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5229 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5230 && mode != DECL_MODE (arg))
5232 *pmode = DECL_MODE (arg);
5233 *punsignedp = unsignedp;
5234 return DECL_INCOMING_RTL (arg);
5238 return 0;
5241 #endif
5243 /* Compute the size and offset from the start of the stacked arguments for a
5244 parm passed in mode PASSED_MODE and with type TYPE.
5246 INITIAL_OFFSET_PTR points to the current offset into the stacked
5247 arguments.
5249 The starting offset and size for this parm are returned in *OFFSET_PTR
5250 and *ARG_SIZE_PTR, respectively.
5252 IN_REGS is non-zero if the argument will be passed in registers. It will
5253 never be set if REG_PARM_STACK_SPACE is not defined.
5255 FNDECL is the function in which the argument was defined.
5257 There are two types of rounding that are done. The first, controlled by
5258 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5259 list to be aligned to the specified boundary (in bits). This rounding
5260 affects the initial and starting offsets, but not the argument size.
5262 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5263 optionally rounds the size of the parm to PARM_BOUNDARY. The
5264 initial offset is not affected by this rounding, while the size always
5265 is and the starting offset may be. */
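/* Illustration (boundaries assumed): a 4-byte parm whose
   FUNCTION_ARG_BOUNDARY is 64 bits starts at the next 8-byte-aligned
   offset, but its size is rounded only if FUNCTION_ARG_PADDING
   requests padding and the size is not already a multiple of
   PARM_BOUNDARY.  */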
5267 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
5268 initial_offset_ptr is positive because locate_and_pad_parm's
5269 callers pass in the total size of args so far as
5270 initial_offset_ptr. arg_size_ptr is always positive. */
5272 void
5273 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5274 initial_offset_ptr, offset_ptr, arg_size_ptr,
5275 alignment_pad)
5276 enum machine_mode passed_mode;
5277 tree type;
5278 int in_regs ATTRIBUTE_UNUSED;
5279 tree fndecl ATTRIBUTE_UNUSED;
5280 struct args_size *initial_offset_ptr;
5281 struct args_size *offset_ptr;
5282 struct args_size *arg_size_ptr;
5283 struct args_size *alignment_pad;
5286 tree sizetree
5287 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5288 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5289 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5291 #ifdef REG_PARM_STACK_SPACE
5292 /* If we have found a stack parm before we reach the end of the
5293 area reserved for registers, skip that area. */
5294 if (! in_regs)
5296 int reg_parm_stack_space = 0;
5298 #ifdef MAYBE_REG_PARM_STACK_SPACE
5299 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5300 #else
5301 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5302 #endif
5303 if (reg_parm_stack_space > 0)
5305 if (initial_offset_ptr->var)
5307 initial_offset_ptr->var
5308 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5309 ssize_int (reg_parm_stack_space));
5310 initial_offset_ptr->constant = 0;
5312 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5313 initial_offset_ptr->constant = reg_parm_stack_space;
5316 #endif /* REG_PARM_STACK_SPACE */
5318 arg_size_ptr->var = 0;
5319 arg_size_ptr->constant = 0;
5320 alignment_pad->var = 0;
5321 alignment_pad->constant = 0;
5323 #ifdef ARGS_GROW_DOWNWARD
5324 if (initial_offset_ptr->var)
5326 offset_ptr->constant = 0;
5327 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5328 initial_offset_ptr->var);
5330 else
5332 offset_ptr->constant = -initial_offset_ptr->constant;
5333 offset_ptr->var = 0;
5335 if (where_pad != none
5336 && (!host_integerp (sizetree, 1)
5337 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5338 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5339 SUB_PARM_SIZE (*offset_ptr, sizetree);
5340 if (where_pad != downward)
5341 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5342 if (initial_offset_ptr->var)
5343 arg_size_ptr->var = size_binop (MINUS_EXPR,
5344 size_binop (MINUS_EXPR,
5345 ssize_int (0),
5346 initial_offset_ptr->var),
5347 offset_ptr->var);
5349 else
5350 arg_size_ptr->constant = (-initial_offset_ptr->constant
5351 - offset_ptr->constant);
5353 #else /* !ARGS_GROW_DOWNWARD */
5354 if (!in_regs
5355 #ifdef REG_PARM_STACK_SPACE
5356 || REG_PARM_STACK_SPACE (fndecl) > 0
5357 #endif
5359 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5360 *offset_ptr = *initial_offset_ptr;
5362 #ifdef PUSH_ROUNDING
5363 if (passed_mode != BLKmode)
5364 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5365 #endif
5367 /* Pad_below needs the pre-rounded size to know how much to pad
5368 below, so this must be done before rounding up. */
5369 if (where_pad == downward
5370 /* However, BLKmode args passed in regs have their padding done elsewhere.
5371 The stack slot must be able to hold the entire register. */
5372 && !(in_regs && passed_mode == BLKmode))
5373 pad_below (offset_ptr, passed_mode, sizetree);
5375 if (where_pad != none
5376 && (!host_integerp (sizetree, 1)
5377 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5378 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5380 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5381 #endif /* ARGS_GROW_DOWNWARD */
5384 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5385 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5387 static void
5388 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5389 struct args_size *offset_ptr;
5390 int boundary;
5391 struct args_size *alignment_pad;
5393 tree save_var = NULL_TREE;
5394 HOST_WIDE_INT save_constant = 0;
5396 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5398 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5400 save_var = offset_ptr->var;
5401 save_constant = offset_ptr->constant;
5404 alignment_pad->var = NULL_TREE;
5405 alignment_pad->constant = 0;
5407 if (boundary > BITS_PER_UNIT)
5409 if (offset_ptr->var)
5411 offset_ptr->var =
5412 #ifdef ARGS_GROW_DOWNWARD
5413 round_down
5414 #else
5415 round_up
5416 #endif
5417 (ARGS_SIZE_TREE (*offset_ptr),
5418 boundary / BITS_PER_UNIT);
5419 offset_ptr->constant = 0; /*?*/
5420 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5421 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5422 save_var);
5424 else
5426 offset_ptr->constant =
5427 #ifdef ARGS_GROW_DOWNWARD
5428 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5429 #else
5430 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5431 #endif
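/* E.g., CEIL_ROUND (10, 8) == 16 when args grow upward, while
   FLOOR_ROUND (-10, 8) == -16 when they grow downward (offsets
   assumed for illustration).  */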
5432 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5433 alignment_pad->constant = offset_ptr->constant - save_constant;
5438 #ifndef ARGS_GROW_DOWNWARD
5439 static void
5440 pad_below (offset_ptr, passed_mode, sizetree)
5441 struct args_size *offset_ptr;
5442 enum machine_mode passed_mode;
5443 tree sizetree;
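/* For a non-BLKmode parm, e.g. a 2-byte HImode value with a 32-bit
   PARM_BOUNDARY (values assumed), the branch below advances the
   offset by 4 - 2 == 2 bytes of padding below the value.  */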
5445 if (passed_mode != BLKmode)
5447 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5448 offset_ptr->constant
5449 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5450 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5451 - GET_MODE_SIZE (passed_mode));
5453 else
5455 if (TREE_CODE (sizetree) != INTEGER_CST
5456 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5458 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5459 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5460 /* Add it in. */
5461 ADD_PARM_SIZE (*offset_ptr, s2);
5462 SUB_PARM_SIZE (*offset_ptr, sizetree);
5466 #endif
5468 /* Walk the tree of blocks describing the binding levels within a function
5469 and warn about uninitialized variables.
5470 This is done after calling flow_analysis and before global_alloc
5471 clobbers the pseudo-regs to hard regs. */
5473 void
5474 uninitialized_vars_warning (block)
5475 tree block;
5477 tree decl, sub;
5478 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5480 if (warn_uninitialized
5481 && TREE_CODE (decl) == VAR_DECL
5482 /* These warnings are unreliable for aggregates
5483 because assigning the fields one by one can fail to convince
5484 flow.c that the entire aggregate was initialized.
5485 Unions are troublesome because members may be shorter. */
5486 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5487 && DECL_RTL (decl) != 0
5488 && GET_CODE (DECL_RTL (decl)) == REG
5489 /* Global optimizations can make it difficult to determine if a
5490 particular variable has been initialized. However, a VAR_DECL
5491 with a nonzero DECL_INITIAL had an initializer, so do not
5492 claim it is potentially uninitialized.
5494 We do not care about the actual value in DECL_INITIAL, so we do
5495 not worry that it may be a dangling pointer. */
5496 && DECL_INITIAL (decl) == NULL_TREE
5497 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5498 warning_with_decl (decl,
5499 "`%s' might be used uninitialized in this function");
5500 if (extra_warnings
5501 && TREE_CODE (decl) == VAR_DECL
5502 && DECL_RTL (decl) != 0
5503 && GET_CODE (DECL_RTL (decl)) == REG
5504 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5505 warning_with_decl (decl,
5506 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5508 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5509 uninitialized_vars_warning (sub);
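/* A minimal example (hypothetical source) that draws the first warning:

     int f (int flag)
     {
       int y;          -- no DECL_INITIAL; allocated to a pseudo-reg
       if (flag)
         y = 1;        -- flow finds a path on which Y is never set
       return y;       -- `y' might be used uninitialized here
     }
*/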
5512 /* Do the appropriate part of uninitialized_vars_warning
5513 but for arguments instead of local variables. */
5515 void
5516 setjmp_args_warning ()
5518 tree decl;
5519 for (decl = DECL_ARGUMENTS (current_function_decl);
5520 decl; decl = TREE_CHAIN (decl))
5521 if (DECL_RTL (decl) != 0
5522 && GET_CODE (DECL_RTL (decl)) == REG
5523 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5524 warning_with_decl (decl,
5525 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5528 /* If this function calls setjmp, put all vars into the stack
5529 unless they were declared `register'. */
5531 void
5532 setjmp_protect (block)
5533 tree block;
5535 tree decl, sub;
5536 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5537 if ((TREE_CODE (decl) == VAR_DECL
5538 || TREE_CODE (decl) == PARM_DECL)
5539 && DECL_RTL (decl) != 0
5540 && (GET_CODE (DECL_RTL (decl)) == REG
5541 || (GET_CODE (DECL_RTL (decl)) == MEM
5542 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5543 /* If this variable came from an inline function, it must be
5544 that its life doesn't overlap the setjmp. If there was a
5545 setjmp in the function, it would already be in memory. We
5546 must exclude such variables because their DECL_RTL might be
5547 set to strange things such as virtual_stack_vars_rtx. */
5548 && ! DECL_FROM_INLINE (decl)
5549 && (
5550 #ifdef NON_SAVING_SETJMP
5551 /* If longjmp doesn't restore the registers,
5552 don't put anything in them. */
5553 NON_SAVING_SETJMP
5554 ||
5555 #endif
5556 ! DECL_REGISTER (decl)))
5557 put_var_into_stack (decl);
5558 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5559 setjmp_protect (sub);
5562 /* Like the previous function, but for args instead of local variables. */
5564 void
5565 setjmp_protect_args ()
5567 tree decl;
5568 for (decl = DECL_ARGUMENTS (current_function_decl);
5569 decl; decl = TREE_CHAIN (decl))
5570 if ((TREE_CODE (decl) == VAR_DECL
5571 || TREE_CODE (decl) == PARM_DECL)
5572 && DECL_RTL (decl) != 0
5573 && (GET_CODE (DECL_RTL (decl)) == REG
5574 || (GET_CODE (DECL_RTL (decl)) == MEM
5575 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5576 && (
5577 /* If longjmp doesn't restore the registers,
5578 don't put anything in them. */
5579 #ifdef NON_SAVING_SETJMP
5580 NON_SAVING_SETJMP
5581 ||
5582 #endif
5583 ! DECL_REGISTER (decl)))
5584 put_var_into_stack (decl);
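/* E.g. (hypothetical source):

     jmp_buf env;
     int f (void)
     {
       int n = 1;        -- would normally live in a register
       if (setjmp (env))
         return n;       -- N must survive a later longjmp
       n = 2;
       g ();             -- may longjmp back, with registers clobbered
       return 0;
     }

   Unless N was declared `register', the functions above force it into a
   stack slot so the longjmp cannot hand back a stale value.  */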
5587 /* Return the context-pointer register corresponding to DECL,
5588 or 0 if it does not need one. */
5590 rtx
5591 lookup_static_chain (decl)
5592 tree decl;
5594 tree context = decl_function_context (decl);
5595 tree link;
5597 if (context == 0
5598 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5599 return 0;
5601 /* We treat inline_function_decl as an alias for the current function
5602 because that is the inline function whose vars, types, etc.
5603 are being merged into the current function.
5604 See expand_inline_function. */
5605 if (context == current_function_decl || context == inline_function_decl)
5606 return virtual_stack_vars_rtx;
5608 for (link = context_display; link; link = TREE_CHAIN (link))
5609 if (TREE_PURPOSE (link) == context)
5610 return RTL_EXPR_RTL (TREE_VALUE (link));
5612 abort ();
5615 /* Convert a stack slot address ADDR for variable VAR
5616 (from a containing function)
5617 into an address valid in this function (using a static chain). */
5619 rtx
5620 fix_lexical_addr (addr, var)
5621 rtx addr;
5622 tree var;
5624 rtx basereg;
5625 HOST_WIDE_INT displacement;
5626 tree context = decl_function_context (var);
5627 struct function *fp;
5628 rtx base = 0;
5630 /* If this is the present function, we need not do anything. */
5631 if (context == current_function_decl || context == inline_function_decl)
5632 return addr;
5634 fp = find_function_data (context);
5636 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5637 addr = XEXP (XEXP (addr, 0), 0);
5639 /* Decode given address as base reg plus displacement. */
5640 if (GET_CODE (addr) == REG)
5641 basereg = addr, displacement = 0;
5642 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5643 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5644 else
5645 abort ();
5647 /* We accept vars reached via the containing function's
5648 incoming arg pointer and via its stack variables pointer. */
5649 if (basereg == fp->internal_arg_pointer)
5651 /* If reached via arg pointer, get the arg pointer value
5652 out of that function's stack frame.
5654 There are two cases: If a separate ap is needed, allocate a
5655 slot in the outer function for it and dereference it that way.
5656 This is correct even if the real ap is actually a pseudo.
5657 Otherwise, just adjust the offset from the frame pointer to
5658 compensate. */
5660 #ifdef NEED_SEPARATE_AP
5661 rtx addr;
5663 addr = get_arg_pointer_save_area (fp);
5664 addr = fix_lexical_addr (XEXP (addr, 0), var);
5665 addr = memory_address (Pmode, addr);
5667 base = gen_rtx_MEM (Pmode, addr);
5668 set_mem_alias_set (base, get_frame_alias_set ());
5669 base = copy_to_reg (base);
5670 #else
5671 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5672 base = lookup_static_chain (var);
5673 #endif
5676 else if (basereg == virtual_stack_vars_rtx)
5678 /* This is the same code as lookup_static_chain, duplicated here to
5679 avoid an extra call to decl_function_context. */
5680 tree link;
5682 for (link = context_display; link; link = TREE_CHAIN (link))
5683 if (TREE_PURPOSE (link) == context)
5685 base = RTL_EXPR_RTL (TREE_VALUE (link));
5686 break;
5690 if (base == 0)
5691 abort ();
5693 /* Use same offset, relative to appropriate static chain or argument
5694 pointer. */
5695 return plus_constant (base, displacement);
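/* A sketch of the case handled here (hypothetical source):

     void outer (void)
     {
       int v;
       void inner (void) { v = 1; }
       ...
     }

   While expanding INNER, the slot for V is an address in OUTER's frame,
   e.g. (plus (reg virtual_stack_vars) (const_int -4)).  This function
   keeps the -4 displacement but rebases it onto the static chain pseudo
   that points at OUTER's frame, so INNER can reach V at run time.  */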
5698 /* Return the address of the trampoline for entering nested fn FUNCTION.
5699 If necessary, allocate a trampoline (in the stack frame)
5700 and emit rtl to initialize its contents (at entry to this function). */
5702 rtx
5703 trampoline_address (function)
5704 tree function;
5706 tree link;
5707 tree rtlexp;
5708 rtx tramp;
5709 struct function *fp;
5710 tree fn_context;
5712 /* Find an existing trampoline and return it. */
5713 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5714 if (TREE_PURPOSE (link) == function)
5715 return
5716 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5718 for (fp = outer_function_chain; fp; fp = fp->outer)
5719 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5720 if (TREE_PURPOSE (link) == function)
5722 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5723 function);
5724 return adjust_trampoline_addr (tramp);
5727 /* None exists; we must make one. */
5729 /* Find the `struct function' for the function containing FUNCTION. */
5730 fp = 0;
5731 fn_context = decl_function_context (function);
5732 if (fn_context != current_function_decl
5733 && fn_context != inline_function_decl)
5734 fp = find_function_data (fn_context);
5736 /* Allocate run-time space for this trampoline
5737 (usually in the defining function's stack frame). */
5738 #ifdef ALLOCATE_TRAMPOLINE
5739 tramp = ALLOCATE_TRAMPOLINE (fp);
5740 #else
5741 /* If rounding needed, allocate extra space
5742 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5743 #ifdef TRAMPOLINE_ALIGNMENT
5744 #define TRAMPOLINE_REAL_SIZE \
5745 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5746 #else
5747 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5748 #endif
5749 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5750 fp ? fp : cfun);
5751 #endif
5753 /* Record the trampoline for reuse and note it for later initialization
5754 by expand_function_end. */
5755 if (fp != 0)
5757 rtlexp = make_node (RTL_EXPR);
5758 RTL_EXPR_RTL (rtlexp) = tramp;
5759 fp->x_trampoline_list = tree_cons (function, rtlexp,
5760 fp->x_trampoline_list);
5762 else
5764 /* Make the RTL_EXPR node temporary, not momentary, so that the
5765 trampoline_list doesn't become garbage. */
5766 rtlexp = make_node (RTL_EXPR);
5768 RTL_EXPR_RTL (rtlexp) = tramp;
5769 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5772 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5773 return adjust_trampoline_addr (tramp);
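/* E.g. (hypothetical GNU C), taking the address of a nested function is
   what leads here:

     int outer (int x)
     {
       int inner (int y) { return x + y; }
       int (*fp) (int) = inner;    -- needs trampoline_address (inner)
       return fp (1);
     }

   The trampoline is a small code stub, usually in OUTER's frame, that
   loads the static chain register with OUTER's frame address and jumps
   to INNER, so a plain function pointer works at the call site.  */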
5776 /* Given a trampoline address,
5777 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5779 static rtx
5780 round_trampoline_addr (tramp)
5781 rtx tramp;
5783 #ifdef TRAMPOLINE_ALIGNMENT
5784 /* Round address up to desired boundary. */
5785 rtx temp = gen_reg_rtx (Pmode);
5786 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5787 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5789 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5790 temp, 0, OPTAB_LIB_WIDEN);
5791 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5792 temp, 0, OPTAB_LIB_WIDEN);
5793 #endif
5794 return tramp;
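/* This is the run-time analogue of CEIL_ROUND: with TRAMPOLINE_ALIGNMENT
   == 64 the byte alignment is 8, and an address of 0x1003 becomes
   (0x1003 + 7) & -8 == 0x1008.  The TRAMPOLINE_REAL_SIZE slack allocated
   in trampoline_address guarantees TRAMPOLINE_SIZE bytes remain after
   this round-up.  */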
5797 /* Given a trampoline address, round it then apply any
5798 platform-specific adjustments so that the result can be used for a
5799 function call. */
5801 static rtx
5802 adjust_trampoline_addr (tramp)
5803 rtx tramp;
5805 tramp = round_trampoline_addr (tramp);
5806 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5807 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5808 #endif
5809 return tramp;
5812 /* Put all this function's BLOCK nodes, including those that are chained
5813 onto the first block, into a vector in depth-first order, and store in
5814 each NOTE for the beginning or end of a block the corresponding BLOCK.
5815 The blocks come from DECL_INITIAL (current_function_decl) and the notes
5816 from the function's current insn chain; the vector is freed before
5817 returning. */
5819 void
5820 identify_blocks ()
5822 int n_blocks;
5823 tree *block_vector, *last_block_vector;
5824 tree *block_stack;
5825 tree block = DECL_INITIAL (current_function_decl);
5827 if (block == 0)
5828 return;
5830 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5831 depth-first order. */
5832 block_vector = get_block_vector (block, &n_blocks);
5833 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5835 last_block_vector = identify_blocks_1 (get_insns (),
5836 block_vector + 1,
5837 block_vector + n_blocks,
5838 block_stack);
5840 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5841 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5842 if (0 && last_block_vector != block_vector + n_blocks)
5843 abort ();
5845 free (block_vector);
5846 free (block_stack);
5849 /* Subroutine of identify_blocks. Do the block substitution on the
5850 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5852 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5853 BLOCK_VECTOR is incremented for each block seen. */
5855 static tree *
5856 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5857 rtx insns;
5858 tree *block_vector;
5859 tree *end_block_vector;
5860 tree *orig_block_stack;
5862 rtx insn;
5863 tree *block_stack = orig_block_stack;
5865 for (insn = insns; insn; insn = NEXT_INSN (insn))
5867 if (GET_CODE (insn) == NOTE)
5869 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5871 tree b;
5873 /* If there are more block notes than BLOCKs, something
5874 is badly wrong. */
5875 if (block_vector == end_block_vector)
5876 abort ();
5878 b = *block_vector++;
5879 NOTE_BLOCK (insn) = b;
5880 *block_stack++ = b;
5882 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5884 /* If there are more NOTE_INSN_BLOCK_ENDs than
5885 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5886 if (block_stack == orig_block_stack)
5887 abort ();
5889 NOTE_BLOCK (insn) = *--block_stack;
5892 else if (GET_CODE (insn) == CALL_INSN
5893 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5895 rtx cp = PATTERN (insn);
5897 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5898 end_block_vector, block_stack);
5899 if (XEXP (cp, 1))
5900 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5901 end_block_vector, block_stack);
5902 if (XEXP (cp, 2))
5903 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5904 end_block_vector, block_stack);
5908 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5909 something is badly wrong. */
5910 if (block_stack != orig_block_stack)
5911 abort ();
5913 return block_vector;
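/* For example, the insn stream for `{ int a; { int b; } }' carries

     NOTE_INSN_BLOCK_BEG    -- outer block: *block_vector++, pushed
     NOTE_INSN_BLOCK_BEG    -- inner block: *block_vector++, pushed
     NOTE_INSN_BLOCK_END    -- pops the inner block
     NOTE_INSN_BLOCK_END    -- pops the outer block

   so the notes must nest exactly like the depth-first walk that built
   BLOCK_VECTOR; the aborts above check precisely that.  */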
5916 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5917 and create duplicate blocks. */
5918 /* ??? Need an option to either create block fragments or to create
5919 abstract origin duplicates of a source block. It really depends
5920 on what optimization has been performed. */
5922 void
5923 reorder_blocks ()
5925 tree block = DECL_INITIAL (current_function_decl);
5926 varray_type block_stack;
5928 if (block == NULL_TREE)
5929 return;
5931 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5933 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5934 reorder_blocks_0 (block);
5936 /* Prune the old trees away, so that they don't get in the way. */
5937 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5938 BLOCK_CHAIN (block) = NULL_TREE;
5940 /* Recreate the block tree from the note nesting. */
5941 reorder_blocks_1 (get_insns (), block, &block_stack);
5942 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5944 /* Remove deleted blocks from the block fragment chains. */
5945 reorder_fix_fragments (block);
5947 VARRAY_FREE (block_stack);
5950 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5952 static void
5953 reorder_blocks_0 (block)
5954 tree block;
5956 while (block)
5958 TREE_ASM_WRITTEN (block) = 0;
5959 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5960 block = BLOCK_CHAIN (block);
5964 static void
5965 reorder_blocks_1 (insns, current_block, p_block_stack)
5966 rtx insns;
5967 tree current_block;
5968 varray_type *p_block_stack;
5970 rtx insn;
5972 for (insn = insns; insn; insn = NEXT_INSN (insn))
5974 if (GET_CODE (insn) == NOTE)
5976 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5978 tree block = NOTE_BLOCK (insn);
5980 /* If we have seen this block before, that means it now
5981 spans multiple address regions. Create a new fragment. */
5982 if (TREE_ASM_WRITTEN (block))
5984 tree new_block = copy_node (block);
5985 tree origin;
5987 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5988 ? BLOCK_FRAGMENT_ORIGIN (block)
5989 : block);
5990 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5991 BLOCK_FRAGMENT_CHAIN (new_block)
5992 = BLOCK_FRAGMENT_CHAIN (origin);
5993 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5995 NOTE_BLOCK (insn) = new_block;
5996 block = new_block;
5999 BLOCK_SUBBLOCKS (block) = 0;
6000 TREE_ASM_WRITTEN (block) = 1;
6001 BLOCK_SUPERCONTEXT (block) = current_block;
6002 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
6003 BLOCK_SUBBLOCKS (current_block) = block;
6004 current_block = block;
6005 VARRAY_PUSH_TREE (*p_block_stack, block);
6007 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6009 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
6010 VARRAY_POP (*p_block_stack);
6011 BLOCK_SUBBLOCKS (current_block)
6012 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
6013 current_block = BLOCK_SUPERCONTEXT (current_block);
6016 else if (GET_CODE (insn) == CALL_INSN
6017 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6019 rtx cp = PATTERN (insn);
6020 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
6021 if (XEXP (cp, 1))
6022 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
6023 if (XEXP (cp, 2))
6024 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
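/* Concretely, if an optimization has reordered insns so that one block's
   notes appear twice,

     BLOCK_BEG (B) ... BLOCK_END (B) ... BLOCK_BEG (B) ... BLOCK_END (B)

   the second BLOCK_BEG finds TREE_ASM_WRITTEN (B) already set and creates
   a fragment B' with BLOCK_FRAGMENT_ORIGIN (B') == B, linked through
   BLOCK_FRAGMENT_CHAIN, so the debug writers can describe both address
   ranges.  */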
6029 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
6030 appears in the block tree, select one of the fragments to become
6031 the new origin block. */
6033 static void
6034 reorder_fix_fragments (block)
6035 tree block;
6037 while (block)
6039 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6040 tree new_origin = NULL_TREE;
6042 if (dup_origin)
6044 if (! TREE_ASM_WRITTEN (dup_origin))
6046 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6048 /* Find the first of the remaining fragments. There must
6049 be at least one -- the current block. */
6050 while (! TREE_ASM_WRITTEN (new_origin))
6051 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6052 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6055 else if (! dup_origin)
6056 new_origin = block;
6058 /* Re-root the rest of the fragments to the new origin. In the
6059 case that DUP_ORIGIN was null, that means BLOCK was the origin
6060 of a chain of fragments and we want to remove those fragments
6061 that didn't make it to the output. */
6062 if (new_origin)
6064 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6065 tree chain = *pp;
6067 while (chain)
6069 if (TREE_ASM_WRITTEN (chain))
6071 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6072 *pp = chain;
6073 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6075 chain = BLOCK_FRAGMENT_CHAIN (chain);
6077 *pp = NULL_TREE;
6080 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6081 block = BLOCK_CHAIN (block);
6085 /* Reverse the order of elements in the chain T of blocks,
6086 and return the new head of the chain (old last element). */
6088 static tree
6089 blocks_nreverse (t)
6090 tree t;
6092 tree prev = 0, decl, next;
6093 for (decl = t; decl; decl = next)
6095 next = BLOCK_CHAIN (decl);
6096 BLOCK_CHAIN (decl) = prev;
6097 prev = decl;
6099 return prev;
6102 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6103 non-NULL, list them all into VECTOR, in a depth-first preorder
6104 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6105 blocks. */
6107 static int
6108 all_blocks (block, vector)
6109 tree block;
6110 tree *vector;
6112 int n_blocks = 0;
6114 while (block)
6116 TREE_ASM_WRITTEN (block) = 0;
6118 /* Record this block. */
6119 if (vector)
6120 vector[n_blocks] = block;
6122 ++n_blocks;
6124 /* Record the subblocks, and their subblocks... */
6125 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6126 vector ? vector + n_blocks : 0);
6127 block = BLOCK_CHAIN (block);
6130 return n_blocks;
6133 /* Return a vector containing all the blocks rooted at BLOCK. The
6134 number of elements in the vector is stored in N_BLOCKS_P. The
6135 vector is dynamically allocated; it is the caller's responsibility
6136 to call `free' on the pointer returned. */
6138 static tree *
6139 get_block_vector (block, n_blocks_p)
6140 tree block;
6141 int *n_blocks_p;
6143 tree *block_vector;
6145 *n_blocks_p = all_blocks (block, NULL);
6146 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6147 all_blocks (block, block_vector);
6149 return block_vector;
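/* E.g. for the block tree

     B0
     +-- B1
     |   +-- B2
     +-- B3

   all_blocks returns 4 and, when VECTOR is non-null, fills it with
   { B0, B1, B2, B3 }, i.e. depth-first preorder.  */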
6152 static int next_block_index = 2;
6154 /* Set BLOCK_NUMBER for all the blocks in FN. */
6156 void
6157 number_blocks (fn)
6158 tree fn;
6160 int i;
6161 int n_blocks;
6162 tree *block_vector;
6164 /* For SDB and XCOFF debugging output, we start numbering the blocks
6165 from 1 within each function, rather than keeping a running
6166 count. */
6167 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6168 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6169 next_block_index = 1;
6170 #endif
6172 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6174 /* The top-level BLOCK isn't numbered at all. */
6175 for (i = 1; i < n_blocks; ++i)
6176 /* We number the blocks from two. */
6177 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6179 free (block_vector);
6181 return;
6184 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6186 tree
6187 debug_find_var_in_block_tree (var, block)
6188 tree var;
6189 tree block;
6191 tree t;
6193 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6194 if (t == var)
6195 return block;
6197 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6199 tree ret = debug_find_var_in_block_tree (var, t);
6200 if (ret)
6201 return ret;
6204 return NULL_TREE;
6207 /* Allocate a function structure and reset its contents to the defaults. */
6209 static void
6210 prepare_function_start ()
6212 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6214 init_stmt_for_function ();
6215 init_eh_for_function ();
6217 cse_not_expected = ! optimize;
6219 /* Caller save not needed yet. */
6220 caller_save_needed = 0;
6222 /* No stack slots have been made yet. */
6223 stack_slot_list = 0;
6225 current_function_has_nonlocal_label = 0;
6226 current_function_has_nonlocal_goto = 0;
6228 /* There is no stack slot for handling nonlocal gotos. */
6229 nonlocal_goto_handler_slots = 0;
6230 nonlocal_goto_stack_level = 0;
6232 /* No labels have been declared for nonlocal use. */
6233 nonlocal_labels = 0;
6234 nonlocal_goto_handler_labels = 0;
6236 /* No function calls so far in this function. */
6237 function_call_count = 0;
6239 /* No parm regs have been allocated.
6240 (This is important for output_inline_function.) */
6241 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6243 /* Initialize the RTL mechanism. */
6244 init_emit ();
6246 /* Initialize the queue of pending postincrement and postdecrements,
6247 and some other info in expr.c. */
6248 init_expr ();
6250 /* We haven't done register allocation yet. */
6251 reg_renumber = 0;
6253 init_varasm_status (cfun);
6255 /* Clear out data used for inlining. */
6256 cfun->inlinable = 0;
6257 cfun->original_decl_initial = 0;
6258 cfun->original_arg_vector = 0;
6260 cfun->stack_alignment_needed = STACK_BOUNDARY;
6261 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6263 /* Set if a call to setjmp is seen. */
6264 current_function_calls_setjmp = 0;
6266 /* Set if a call to longjmp is seen. */
6267 current_function_calls_longjmp = 0;
6269 current_function_calls_alloca = 0;
6270 current_function_contains_functions = 0;
6271 current_function_is_leaf = 0;
6272 current_function_nothrow = 0;
6273 current_function_sp_is_unchanging = 0;
6274 current_function_uses_only_leaf_regs = 0;
6275 current_function_has_computed_jump = 0;
6276 current_function_is_thunk = 0;
6278 current_function_returns_pcc_struct = 0;
6279 current_function_returns_struct = 0;
6280 current_function_epilogue_delay_list = 0;
6281 current_function_uses_const_pool = 0;
6282 current_function_uses_pic_offset_table = 0;
6283 current_function_cannot_inline = 0;
6285 /* We have not yet needed to make a label to jump to for tail-recursion. */
6286 tail_recursion_label = 0;
6288 /* We haven't had a need to make a save area for ap yet. */
6289 arg_pointer_save_area = 0;
6291 /* No stack slots allocated yet. */
6292 frame_offset = 0;
6294 /* No SAVE_EXPRs in this function yet. */
6295 save_expr_regs = 0;
6297 /* No RTL_EXPRs in this function yet. */
6298 rtl_expr_chain = 0;
6300 /* Set up to allocate temporaries. */
6301 init_temp_slots ();
6303 /* Indicate that we need to distinguish between the return value of the
6304 present function and the return value of a function being called. */
6305 rtx_equal_function_value_matters = 1;
6307 /* Indicate that we have not instantiated virtual registers yet. */
6308 virtuals_instantiated = 0;
6310 /* Indicate that we want CONCATs now. */
6311 generating_concat_p = 1;
6313 /* Indicate we have no need of a frame pointer yet. */
6314 frame_pointer_needed = 0;
6316 /* By default assume not varargs or stdarg. */
6317 current_function_varargs = 0;
6318 current_function_stdarg = 0;
6320 /* We haven't made any trampolines for this function yet. */
6321 trampoline_list = 0;
6323 init_pending_stack_adjust ();
6324 inhibit_defer_pop = 0;
6326 current_function_outgoing_args_size = 0;
6328 if (init_lang_status)
6329 (*init_lang_status) (cfun);
6330 if (init_machine_status)
6331 (*init_machine_status) (cfun);
6334 /* Initialize the rtl expansion mechanism so that we can do simple things
6335 like generate sequences. This is used to provide a context during global
6336 initialization of some passes. */
6337 void
6338 init_dummy_function_start ()
6340 prepare_function_start ();
6343 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6344 and initialize static variables for generating RTL for the statements
6345 of the function. */
6347 void
6348 init_function_start (subr, filename, line)
6349 tree subr;
6350 const char *filename;
6351 int line;
6353 prepare_function_start ();
6355 current_function_name = (*decl_printable_name) (subr, 2);
6356 cfun->decl = subr;
6358 /* Nonzero if this is a nested function that uses a static chain. */
6360 current_function_needs_context
6361 = (decl_function_context (current_function_decl) != 0
6362 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6364 /* Within function body, compute a type's size as soon as it is laid out. */
6365 immediate_size_expand++;
6367 /* Prevent ever trying to delete the first instruction of a function.
6368 Also tell final how to output a linenum before the function prologue.
6369 Note linenums could be missing, e.g. when compiling a Java .class file. */
6370 if (line > 0)
6371 emit_line_note (filename, line);
6373 /* Make sure first insn is a note even if we don't want linenums.
6374 This makes sure the first insn will never be deleted.
6375 Also, final expects a note to appear there. */
6376 emit_note (NULL, NOTE_INSN_DELETED);
6378 /* Set flags used by final.c. */
6379 if (aggregate_value_p (DECL_RESULT (subr)))
6381 #ifdef PCC_STATIC_STRUCT_RETURN
6382 current_function_returns_pcc_struct = 1;
6383 #endif
6384 current_function_returns_struct = 1;
6387 /* Warn if this value is an aggregate type,
6388 regardless of which calling convention we are using for it. */
6389 if (warn_aggregate_return
6390 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6391 warning ("function returns an aggregate");
6393 current_function_returns_pointer
6394 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6397 /* Make sure all values used by the optimization passes have sane
6398 defaults. */
6399 void
6400 init_function_for_compilation ()
6402 reg_renumber = 0;
6404 /* No prologue/epilogue insns yet. */
6405 VARRAY_GROW (prologue, 0);
6406 VARRAY_GROW (epilogue, 0);
6407 VARRAY_GROW (sibcall_epilogue, 0);
6410 /* Indicate that the current function uses extra args
6411 not explicitly mentioned in the argument list in any fashion. */
6413 void
6414 mark_varargs ()
6416 current_function_varargs = 1;
6419 /* Expand a call to __main at the beginning of a possible main function. */
6421 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6422 #undef HAS_INIT_SECTION
6423 #define HAS_INIT_SECTION
6424 #endif
6426 void
6427 expand_main_function ()
6429 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6430 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6432 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6433 rtx tmp, seq;
6435 start_sequence ();
6436 /* Forcibly align the stack. */
6437 #ifdef STACK_GROWS_DOWNWARD
6438 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
6439 stack_pointer_rtx, 1, OPTAB_WIDEN);
6440 #else
6441 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6442 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6443 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6444 stack_pointer_rtx, 1, OPTAB_WIDEN);
6445 #endif
6446 if (tmp != stack_pointer_rtx)
6447 emit_move_insn (stack_pointer_rtx, tmp);
6449 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6450 tmp = force_reg (Pmode, const0_rtx);
6451 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6452 seq = gen_sequence ();
6453 end_sequence ();
6455 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6456 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6457 break;
6458 if (tmp)
6459 emit_insn_before (seq, tmp);
6460 else
6461 emit_insn (seq);
6463 #endif
6465 #ifndef HAS_INIT_SECTION
6466 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6467 VOIDmode, 0);
6468 #endif
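/* E.g. with PREFERRED_STACK_BOUNDARY == 128, ALIGN is 16; in the
   stack-grows-downward case the AND above computes sp &= -16, forcing an
   incoming sp of 0x...fff8 down to the 16-byte boundary 0x...fff0.
   allocate_dynamic_stack_space then picks up the slack.  */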
6471 extern struct obstack permanent_obstack;
6473 /* The PENDING_SIZES represent the sizes of variable-sized types.
6474 Create RTL for the various sizes now (using temporary variables),
6475 so that we can refer to the sizes from the RTL we are generating
6476 for the current function. The PENDING_SIZES are a TREE_LIST. The
6477 TREE_VALUE of each node is a SAVE_EXPR. */
6479 void
6480 expand_pending_sizes (pending_sizes)
6481 tree pending_sizes;
6483 tree tem;
6485 /* Evaluate now the sizes of any types declared among the arguments. */
6486 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6488 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6489 /* Flush the queue in case this parameter declaration has
6490 side-effects. */
6491 emit_queue ();
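/* E.g. (hypothetical C99 source)

     void f (int n, int a[n][n]);

   The size `n * n * sizeof (int)' arrives as a SAVE_EXPR on the
   PENDING_SIZES list; expanding it here pins the computed value in a
   temporary so later references to the type's size reuse it rather than
   re-evaluating N.  */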
6495 /* Start the RTL for a new function, and set variables used for
6496 emitting RTL.
6497 SUBR is the FUNCTION_DECL node.
6498 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6499 the function's parameters, which must be run at any return statement. */
6501 void
6502 expand_function_start (subr, parms_have_cleanups)
6503 tree subr;
6504 int parms_have_cleanups;
6506 tree tem;
6507 rtx last_ptr = NULL_RTX;
6509 /* Make sure volatile mem refs aren't considered
6510 valid operands of arithmetic insns. */
6511 init_recog_no_volatile ();
6513 current_function_instrument_entry_exit
6514 = (flag_instrument_function_entry_exit
6515 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6517 current_function_profile
6518 = (profile_flag
6519 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6521 current_function_limit_stack
6522 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6524 /* If function gets a static chain arg, store it in the stack frame.
6525 Do this first, so it gets the first stack slot offset. */
6526 if (current_function_needs_context)
6528 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6530 /* Delay copying static chain if it is not a register to avoid
6531 conflicts with regs used for parameters. */
6532 if (! SMALL_REGISTER_CLASSES
6533 || GET_CODE (static_chain_incoming_rtx) == REG)
6534 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6537 /* If the parameters of this function need cleaning up, get a label
6538 for the beginning of the code which executes those cleanups. This must
6539 be done before doing anything with return_label. */
6540 if (parms_have_cleanups)
6541 cleanup_label = gen_label_rtx ();
6542 else
6543 cleanup_label = 0;
6545 /* Make the label for return statements to jump to. Do not special
6546 case machines with special return instructions -- they will be
6547 handled later during jump, ifcvt, or epilogue creation. */
6548 return_label = gen_label_rtx ();
6550 /* Initialize rtx used to return the value. */
6551 /* Do this before assign_parms so that we copy the struct value address
6552 before any library calls that assign parms might generate. */
6554 /* Decide whether to return the value in memory or in a register. */
6555 if (aggregate_value_p (DECL_RESULT (subr)))
6557 /* Returning something that won't go in a register. */
6558 rtx value_address = 0;
6560 #ifdef PCC_STATIC_STRUCT_RETURN
6561 if (current_function_returns_pcc_struct)
6563 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6564 value_address = assemble_static_space (size);
6566 else
6567 #endif
6569 /* Expect to be passed the address of a place to store the value.
6570 If it is passed as an argument, assign_parms will take care of
6571 it. */
6572 if (struct_value_incoming_rtx)
6574 value_address = gen_reg_rtx (Pmode);
6575 emit_move_insn (value_address, struct_value_incoming_rtx);
6578 if (value_address)
6580 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6581 set_mem_attributes (x, DECL_RESULT (subr), 1);
6582 SET_DECL_RTL (DECL_RESULT (subr), x);
6585 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6586 /* If return mode is void, this decl rtl should not be used. */
6587 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6588 else
6590 /* Compute the return values into a pseudo reg, which we will copy
6591 into the true return register after the cleanups are done. */
6593 /* In order to figure out what mode to use for the pseudo, we
6594 figure out what the mode of the eventual return register will
6595 actually be, and use that. */
6596 rtx hard_reg
6597 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6598 subr, 1);
6600 /* Structures that are returned in registers are not aggregate_value_p,
6601 so we may see a PARALLEL. Don't play pseudo games with this. */
6602 if (! REG_P (hard_reg))
6603 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6604 else
6606 /* Create the pseudo. */
6607 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6609 /* Needed because we may need to move this to memory
6610 in case it's a named return value whose address is taken. */
6611 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6615 /* Initialize rtx for parameters and local variables.
6616 In some cases this requires emitting insns. */
6618 assign_parms (subr);
6620 /* Copy the static chain now if it wasn't a register. The delay is to
6621 avoid conflicts with the parameter passing registers. */
6623 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6624 if (GET_CODE (static_chain_incoming_rtx) != REG)
6625 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6627 /* The following was moved from init_function_start.
6628 The move is supposed to make sdb output more accurate. */
6629 /* Indicate the beginning of the function body,
6630 as opposed to parm setup. */
6631 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6633 if (GET_CODE (get_last_insn ()) != NOTE)
6634 emit_note (NULL, NOTE_INSN_DELETED);
6635 parm_birth_insn = get_last_insn ();
6637 context_display = 0;
6638 if (current_function_needs_context)
6640 /* Fetch static chain values for containing functions. */
6641 tem = decl_function_context (current_function_decl);
6642 /* Copy the static chain pointer into a pseudo. If we have
6643 small register classes, copy the value from memory if
6644 static_chain_incoming_rtx is a REG. */
6645 if (tem)
6647 /* If the static chain originally came in a register, put it back
6648 there, then move it out in the next insn. The reason for
6649 this peculiar code is to satisfy function integration. */
6650 if (SMALL_REGISTER_CLASSES
6651 && GET_CODE (static_chain_incoming_rtx) == REG)
6652 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6653 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6656 while (tem)
6658 tree rtlexp = make_node (RTL_EXPR);
6660 RTL_EXPR_RTL (rtlexp) = last_ptr;
6661 context_display = tree_cons (tem, rtlexp, context_display);
6662 tem = decl_function_context (tem);
6663 if (tem == 0)
6664 break;
6665 /* Chain thru stack frames, assuming pointer to next lexical frame
6666 is found at the place we always store it. */
6667 #ifdef FRAME_GROWS_DOWNWARD
6668 last_ptr = plus_constant (last_ptr,
6669 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6670 #endif
6671 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6672 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6673 last_ptr = copy_to_reg (last_ptr);
6675 /* If we are not optimizing, ensure that we know that this
6676 piece of context is live over the entire function. */
6677 if (! optimize)
6678 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6679 save_expr_regs);
6683 if (current_function_instrument_entry_exit)
6685 rtx fun = DECL_RTL (current_function_decl);
6686 if (GET_CODE (fun) == MEM)
6687 fun = XEXP (fun, 0);
6688 else
6689 abort ();
6690 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6691 2, fun, Pmode,
6692 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6693 0,
6694 hard_frame_pointer_rtx),
6695 Pmode);
6698 if (current_function_profile)
6700 current_function_profile_label_no = profile_label_no++;
6701 #ifdef PROFILE_HOOK
6702 PROFILE_HOOK (current_function_profile_label_no);
6703 #endif
6706 /* After the display initializations is where the tail-recursion label
6707 should go, if we end up needing one. Ensure we have a NOTE here
6708 since some things (like trampolines) get placed before this. */
6709 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6711 /* Evaluate now the sizes of any types declared among the arguments. */
6712 expand_pending_sizes (nreverse (get_pending_sizes ()));
6714 /* Make sure there is a line number after the function entry setup code. */
6715 force_next_line_note ();
6718 /* Undo the effects of init_dummy_function_start. */
6719 void
6720 expand_dummy_function_end ()
6722 /* End any sequences that failed to be closed due to syntax errors. */
6723 while (in_sequence_p ())
6724 end_sequence ();
6726 /* Outside function body, can't compute type's actual size
6727 until next function's body starts. */
6729 free_after_parsing (cfun);
6730 free_after_compilation (cfun);
6731 cfun = 0;
6734 /* Call DOIT for each hard register used as a return value from
6735 the current function. */
6737 void
6738 diddle_return_value (doit, arg)
6739 void (*doit) PARAMS ((rtx, void *));
6740 void *arg;
6742 rtx outgoing = current_function_return_rtx;
6744 if (! outgoing)
6745 return;
6747 if (GET_CODE (outgoing) == REG)
6748 (*doit) (outgoing, arg);
6749 else if (GET_CODE (outgoing) == PARALLEL)
6751 int i;
6753 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6755 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6757 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6758 (*doit) (x, arg);
6763 static void
6764 do_clobber_return_reg (reg, arg)
6765 rtx reg;
6766 void *arg ATTRIBUTE_UNUSED;
6768 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6771 void
6772 clobber_return_register ()
6774 diddle_return_value (do_clobber_return_reg, NULL);
6776 /* In case we do use pseudo to return value, clobber it too. */
6777 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6779 tree decl_result = DECL_RESULT (current_function_decl);
6780 rtx decl_rtl = DECL_RTL (decl_result);
6781 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6783 do_clobber_return_reg (decl_rtl, NULL);
6788 static void
6789 do_use_return_reg (reg, arg)
6790 rtx reg;
6791 void *arg ATTRIBUTE_UNUSED;
6793 emit_insn (gen_rtx_USE (VOIDmode, reg));
6796 void
6797 use_return_register ()
6799 diddle_return_value (do_use_return_reg, NULL);
6802 /* Generate RTL for the end of the current function.
6803 FILENAME and LINE are the current position in the source file.
6805 It is up to language-specific callers to do cleanups for parameters--
6806 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6808 void
6809 expand_function_end (filename, line, end_bindings)
6810 const char *filename;
6811 int line;
6812 int end_bindings;
6814 tree link;
6815 rtx clobber_after;
6817 #ifdef TRAMPOLINE_TEMPLATE
6818 static rtx initial_trampoline;
6819 #endif
6821 finish_expr_for_function ();
6823 /* If arg_pointer_save_area was referenced only from a nested
6824 function, we will not have initialized it yet. Do that now. */
6825 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6826 get_arg_pointer_save_area (cfun);
6828 #ifdef NON_SAVING_SETJMP
6829 /* Don't put any variables in registers if we call setjmp
6830 on a machine that fails to restore the registers. */
6831 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6833 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6834 setjmp_protect (DECL_INITIAL (current_function_decl));
6836 setjmp_protect_args ();
6838 #endif
6840 /* Initialize any trampolines required by this function. */
6841 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6843 tree function = TREE_PURPOSE (link);
6844 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6845 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6846 #ifdef TRAMPOLINE_TEMPLATE
6847 rtx blktramp;
6848 #endif
6849 rtx seq;
6851 #ifdef TRAMPOLINE_TEMPLATE
6852 /* First make sure this compilation has a template for
6853 initializing trampolines. */
6854 if (initial_trampoline == 0)
6856 initial_trampoline
6857 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6858 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6860 ggc_add_rtx_root (&initial_trampoline, 1);
6862 #endif
6864 /* Generate insns to initialize the trampoline. */
6865 start_sequence ();
6866 tramp = round_trampoline_addr (XEXP (tramp, 0));
6867 #ifdef TRAMPOLINE_TEMPLATE
6868 blktramp = replace_equiv_address (initial_trampoline, tramp);
6869 emit_block_move (blktramp, initial_trampoline,
6870 GEN_INT (TRAMPOLINE_SIZE));
6871 #endif
6872 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6873 seq = get_insns ();
6874 end_sequence ();
6876 /* Put those insns at entry to the containing function (this one). */
6877 emit_insns_before (seq, tail_recursion_reentry);
6880 /* If we are doing stack checking and this function makes calls,
6881 do a stack probe at the start of the function to ensure we have enough
6882 space for another stack frame. */
6883 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6885 rtx insn, seq;
6887 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6888 if (GET_CODE (insn) == CALL_INSN)
6890 start_sequence ();
6891 probe_stack_range (STACK_CHECK_PROTECT,
6892 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6893 seq = get_insns ();
6894 end_sequence ();
6895 emit_insns_before (seq, tail_recursion_reentry);
6896 break;
6900 /* Warn about unused parms if extra warnings were specified. */
6901 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6902 warning. WARN_UNUSED_PARAMETER is negative when set by
6903 -Wunused. */
6904 if (warn_unused_parameter > 0
6905 || (warn_unused_parameter < 0 && extra_warnings))
6907 tree decl;
6909 for (decl = DECL_ARGUMENTS (current_function_decl);
6910 decl; decl = TREE_CHAIN (decl))
6911 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6912 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6913 warning_with_decl (decl, "unused parameter `%s'");
6916 /* Delete handlers for nonlocal gotos if nothing uses them. */
6917 if (nonlocal_goto_handler_slots != 0
6918 && ! current_function_has_nonlocal_label)
6919 delete_handlers ();
6921 /* End any sequences that failed to be closed due to syntax errors. */
6922 while (in_sequence_p ())
6923 end_sequence ();
6925 /* Outside function body, can't compute type's actual size
6926 until next function's body starts. */
6927 immediate_size_expand--;
6929 clear_pending_stack_adjust ();
6930 do_pending_stack_adjust ();
6932 /* Mark the end of the function body.
6933 If control reaches this insn, the function can drop through
6934 without returning a value. */
6935 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6937 /* Must mark the last line number note in the function, so that the test
6938 coverage code can avoid counting the last line twice. This just tells
6939 the code to ignore the immediately following line note, since there
6940 already exists a copy of this note somewhere above. This line number
6941 note is still needed for debugging though, so we can't delete it. */
6942 if (flag_test_coverage)
6943 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6945 /* Output a linenumber for the end of the function.
6946 SDB depends on this. */
6947 emit_line_note_force (filename, line);
6949 /* Before the return label (if any), clobber the return
6950 registers so that they are not propagated live to the rest of
6951 the function. This can only happen with functions that drop
6952 through; if there had been a return statement, there would
6953 have either been a return rtx, or a jump to the return label.
6955 We delay actual code generation until after the current_function_value_rtx
6956 is computed. */
6957 clobber_after = get_last_insn ();
6959 /* Output the label for the actual return from the function,
6960 if one is expected. This happens either because a function epilogue
6961 is used instead of a return instruction, or because a return was done
6962 with a goto in order to run local cleanups, or because of pcc-style
6963 structure returning. */
6964 if (return_label)
6965 emit_label (return_label);
6967 /* C++ uses this. */
6968 if (end_bindings)
6969 expand_end_bindings (0, 0, 0);
6971 if (current_function_instrument_entry_exit)
6973 rtx fun = DECL_RTL (current_function_decl);
6974 if (GET_CODE (fun) == MEM)
6975 fun = XEXP (fun, 0);
6976 else
6977 abort ();
6978 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6979 2, fun, Pmode,
6980 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6981 0,
6982 hard_frame_pointer_rtx),
6983 Pmode);
6986 /* Let except.c know where it should emit the call to unregister
6987 the function context for sjlj exceptions. */
6988 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6989 sjlj_emit_function_exit_after (get_last_insn ());
6991 /* If we had calls to alloca, and this machine needs
6992 an accurate stack pointer to exit the function,
6993 insert some code to save and restore the stack pointer. */
6994 #ifdef EXIT_IGNORE_STACK
6995 if (! EXIT_IGNORE_STACK)
6996 #endif
6997 if (current_function_calls_alloca)
6999 rtx tem = 0;
7001 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
7002 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
7005 /* If scalar return value was computed in a pseudo-reg, or was a named
7006 return value that got dumped to the stack, copy that to the hard
7007 return register. */
7008 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
7010 tree decl_result = DECL_RESULT (current_function_decl);
7011 rtx decl_rtl = DECL_RTL (decl_result);
7013 if (REG_P (decl_rtl)
7014 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
7015 : DECL_REGISTER (decl_result))
7017 rtx real_decl_rtl = current_function_return_rtx;
7019 /* This should be set in assign_parms. */
7020 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
7021 abort ();
7023 /* If this is a BLKmode structure being returned in registers,
7024 then use the mode computed in expand_return. Note that if
7025 decl_rtl is memory, then its mode may have been changed,
7026 but that current_function_return_rtx has not. */
7027 if (GET_MODE (real_decl_rtl) == BLKmode)
7028 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
7030 /* If a named return value dumped decl_result to memory, then
7031 we may need to re-do the PROMOTE_MODE signed/unsigned
7032 extension. */
7033 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
7035 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
7037 #ifdef PROMOTE_FUNCTION_RETURN
7038 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
7039 &unsignedp, 1);
7040 #endif
7042 convert_move (real_decl_rtl, decl_rtl, unsignedp);
7044 else if (GET_CODE (real_decl_rtl) == PARALLEL)
7045 emit_group_load (real_decl_rtl, decl_rtl,
7046 int_size_in_bytes (TREE_TYPE (decl_result)));
7047 else
7048 emit_move_insn (real_decl_rtl, decl_rtl);
7052 /* If returning a structure, arrange to return the address of the value
7053 in a place where debuggers expect to find it.
7055 If returning a structure PCC style,
7056 the caller also depends on this value.
7057 And current_function_returns_pcc_struct is not necessarily set. */
7058 if (current_function_returns_struct
7059 || current_function_returns_pcc_struct)
7061 rtx value_address
7062 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7063 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7064 #ifdef FUNCTION_OUTGOING_VALUE
7065 rtx outgoing
7066 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7067 current_function_decl);
7068 #else
7069 rtx outgoing
7070 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7071 #endif
7073 /* Mark this as a function return value so integrate will delete the
7074 assignment and USE below when inlining this function. */
7075 REG_FUNCTION_VALUE_P (outgoing) = 1;
7077 #ifdef POINTERS_EXTEND_UNSIGNED
7078 /* The address may be ptr_mode and OUTGOING may be Pmode. */
7079 if (GET_MODE (outgoing) != GET_MODE (value_address))
7080 value_address = convert_memory_address (GET_MODE (outgoing),
7081 value_address);
7082 #endif
7084 emit_move_insn (outgoing, value_address);
7086 /* Show return register used to hold result (in this case the address
7087 of the result). */
7088 current_function_return_rtx = outgoing;
7091 /* If this is an implementation of throw, do what's necessary to
7092 communicate between __builtin_eh_return and the epilogue. */
7093 expand_eh_return ();
7095 /* Emit the actual code to clobber the return register. */
7097 rtx seq, after;
7099 start_sequence ();
7100 clobber_return_register ();
7101 seq = gen_sequence ();
7102 end_sequence ();
7104 after = emit_insn_after (seq, clobber_after);
7106 if (clobber_after != after)
7107 cfun->x_clobber_return_insn = after;
7110 /* ??? This should no longer be necessary since stupid is no longer with
7111 us, but there are some parts of the compiler (e.g. reload_combine and
7112 sh mach_dep_reorg) that still try to compute their own lifetime info
7113 instead of using the general framework. */
7114 use_return_register ();
7116 /* Fix up any gotos that jumped out to the outermost
7117 binding level of the function.
7118 Must follow emitting RETURN_LABEL. */
7120 /* If you have any cleanups to do at this point,
7121 and they need to create temporary variables,
7122 then you will lose. */
7123 expand_fixups (get_insns ());
7126 rtx
7127 get_arg_pointer_save_area (f)
7128 struct function *f;
7130 rtx ret = f->x_arg_pointer_save_area;
7132 if (! ret)
7134 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7135 f->x_arg_pointer_save_area = ret;
7138 if (f == cfun && ! f->arg_pointer_save_area_init)
7140 rtx seq;
7142 /* Save the arg pointer at the beginning of the function. The
7143 generated stack slot may not be a valid memory address, so we
7144 have to check it and fix it if necessary. */
7145 start_sequence ();
7146 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7147 seq = gen_sequence ();
7148 end_sequence ();
7150 push_topmost_sequence ();
7151 emit_insn_after (seq, get_insns ());
7152 pop_topmost_sequence ();
7155 return ret;
7158 /* Extend a vector that records the INSN_UIDs of INSNS (either a
7159 sequence or a single insn). */
7161 static void
7162 record_insns (insns, vecp)
7163 rtx insns;
7164 varray_type *vecp;
7166 if (GET_CODE (insns) == SEQUENCE)
7168 int len = XVECLEN (insns, 0);
7169 int i = VARRAY_SIZE (*vecp);
7171 VARRAY_GROW (*vecp, i + len);
7172 while (--len >= 0)
7174 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
7175 ++i;
7178 else
7180 int i = VARRAY_SIZE (*vecp);
7181 VARRAY_GROW (*vecp, i + 1);
7182 VARRAY_INT (*vecp, i) = INSN_UID (insns);
7186 /* Determine how many INSN_UIDs in VEC are part of INSN. */
7188 static int
7189 contains (insn, vec)
7190 rtx insn;
7191 varray_type vec;
7193 int i, j;
7195 if (GET_CODE (insn) == INSN
7196 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7198 int count = 0;
7199 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7200 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7201 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7202 count++;
7203 return count;
7205 else
7207 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7208 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7209 return 1;
7211 return 0;
7214 int
7215 prologue_epilogue_contains (insn)
7216 rtx insn;
7218 if (contains (insn, prologue))
7219 return 1;
7220 if (contains (insn, epilogue))
7221 return 1;
7222 return 0;
7225 int
7226 sibcall_epilogue_contains (insn)
7227 rtx insn;
7229 if (sibcall_epilogue)
7230 return contains (insn, sibcall_epilogue);
7231 return 0;
7234 #ifdef HAVE_return
7235 /* Insert gen_return at the end of block BB. This also means updating
7236 block_for_insn appropriately. */
7238 static void
7239 emit_return_into_block (bb, line_note)
7240 basic_block bb;
7241 rtx line_note;
7243 rtx p, end;
7245 p = NEXT_INSN (bb->end);
7246 end = emit_jump_insn_after (gen_return (), bb->end);
7247 if (line_note)
7248 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7249 NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7251 #endif /* HAVE_return */
7253 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7255 /* These functions convert the epilogue into a variant that does not modify the
7256 stack pointer. This is used in cases where a function returns an object
7257 whose size is not known until it is computed. The called function leaves the
7258 object on the stack, leaves the stack depressed, and returns a pointer to
7259 the object.
7261 What we need to do is track all modifications and references to the stack
7262 pointer, deleting the modifications and changing the references to point to
7263 the location the stack pointer would have pointed to had the modifications
7264 taken place.
7266 These functions need to be portable so we need to make as few assumptions
7267 about the epilogue as we can. However, the epilogue basically contains
7268 three things: instructions to reset the stack pointer, instructions to
7269 reload registers, possibly including the frame pointer, and an
7270 instruction to return to the caller.
7272 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7273 We also make no attempt to validate the insns we make since if they are
7274 invalid, we probably can't do anything valid. The intent is that these
7275 routines get "smarter" as more and more machines start to use them and
7276 they try operating on different epilogues.
7278 We use the following structure to track what the part of the epilogue that
7279 we've already processed has done. We keep two copies of the SP equivalence,
7280 one for use during the insn we are processing and one for use in the next
7281 insn. The difference is because one part of a PARALLEL may adjust SP
7282 and the other may use it. */
7284 struct epi_info
7286 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7287 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7288 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7289 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7290 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7291 should be set to once we no longer need
7292 its value. */
7295 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7296 static void emit_equiv_load PARAMS ((struct epi_info *));
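/* As an illustrative sketch (RTL details vary by target), an epilogue such
   as

     (set (reg fp) (mem (plus (reg sp) (const_int 4))))
     (set (reg sp) (plus (reg sp) (const_int 8)))
     (return)

   is rewritten so SP is never modified: the FP restore is emitted with its
   address re-expressed through the tracked SP equivalence, the SP
   adjustment only updates the recorded SP_OFFSET, and the RETURN becomes
   an indirect jump through the relocated return-address slot.  */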
7298 /* Modify SEQ, a SEQUENCE that is part of the epilogue, to make no modifications
7299 to the stack pointer. Return the new sequence. */
7301 static rtx
7302 keep_stack_depressed (seq)
7303 rtx seq;
7305 int i, j;
7306 struct epi_info info;
7308 /* If the epilogue is just a single instruction, it must be OK as is. */
7310 if (GET_CODE (seq) != SEQUENCE)
7311 return seq;
7313 /* Otherwise, start a sequence, initialize the information we have, and
7314 process all the insns we were given. */
7315 start_sequence ();
7317 info.sp_equiv_reg = stack_pointer_rtx;
7318 info.sp_offset = 0;
7319 info.equiv_reg_src = 0;
7321 for (i = 0; i < XVECLEN (seq, 0); i++)
7323 rtx insn = XVECEXP (seq, 0, i);
7325 if (!INSN_P (insn))
7327 add_insn (insn);
7328 continue;
7331 /* If this insn references the register that SP is equivalent to and
7332 we have a pending load to that register, we must force out the load
7333 first and then indicate we no longer know what SP's equivalent is. */
7334 if (info.equiv_reg_src != 0
7335 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7337 emit_equiv_load (&info);
7338 info.sp_equiv_reg = 0;
7341 info.new_sp_equiv_reg = info.sp_equiv_reg;
7342 info.new_sp_offset = info.sp_offset;
7344 /* If this is a (RETURN) and the return address is on the stack,
7345 update the address and change to an indirect jump. */
7346 if (GET_CODE (PATTERN (insn)) == RETURN
7347 || (GET_CODE (PATTERN (insn)) == PARALLEL
7348 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7350 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7351 rtx base = 0;
7352 HOST_WIDE_INT offset = 0;
7353 rtx jump_insn, jump_set;
7355 /* If the return address is in a register, we can emit the insn
7356 unchanged. Otherwise, it must be a MEM and we see what the
7357 base register and offset are. In any case, we have to emit any
7358 pending load to the equivalent reg of SP, if any. */
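/* The shape of INCOMING_RETURN_ADDR_RTX is target-specific; typical
   (hypothetical) examples are a link register such as (reg:SI 14), or
   a stack slot such as (mem:SI (reg:SI sp)) or
   (mem:SI (plus:SI (reg:SI sp) (const_int 4))) -- exactly the three
   forms handled below. */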
7359 if (GET_CODE (retaddr) == REG)
7361 emit_equiv_load (&info);
7362 add_insn (insn);
7363 continue;
7365 else if (GET_CODE (retaddr) == MEM
7366 && GET_CODE (XEXP (retaddr, 0)) == REG)
7367 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7368 else if (GET_CODE (retaddr) == MEM
7369 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7370 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7371 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7373 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7374 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7376 else
7377 abort ();
7379 /* If the base of the location containing the return pointer
7380 is SP, we must update it with the replacement address. Otherwise,
7381 just build the necessary MEM. */
7382 retaddr = plus_constant (base, offset);
7383 if (base == stack_pointer_rtx)
7384 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7385 plus_constant (info.sp_equiv_reg,
7386 info.sp_offset));
7388 retaddr = gen_rtx_MEM (Pmode, retaddr);
7390 /* If there is a pending load to the equivalent register for SP
7391 and we reference that register, we must load our address into
7392 a scratch register and then do that load. */
7393 if (info.equiv_reg_src
7394 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7396 unsigned int regno;
7397 rtx reg;
7399 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7400 if (HARD_REGNO_MODE_OK (regno, Pmode)
7401 && !fixed_regs[regno]
7402 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7403 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7404 regno)
7405 && !refers_to_regno_p (regno,
7406 regno + HARD_REGNO_NREGS (regno,
7407 Pmode),
7408 info.equiv_reg_src, NULL))
7409 break;
7411 if (regno == FIRST_PSEUDO_REGISTER)
7412 abort ();
7414 reg = gen_rtx_REG (Pmode, regno);
7415 emit_move_insn (reg, retaddr);
7416 retaddr = reg;
7419 emit_equiv_load (&info);
7420 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7422 /* Show that the SET in the insn above is a RETURN. */
7423 jump_set = single_set (jump_insn);
7424 if (jump_set == 0)
7425 abort ();
7426 else
7427 SET_IS_RETURN_P (jump_set) = 1;
7430 /* If SP is not mentioned in the pattern and its equivalent register, if
7431 any, is not modified, just emit it. Otherwise, if neither SP nor its
7432 equivalent register is set, replace the references to SP and emit the
7433 insn. If none of those hold, handle each SET individually. */
7434 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7435 && (info.sp_equiv_reg == stack_pointer_rtx
7436 || !reg_set_p (info.sp_equiv_reg, insn)))
7437 add_insn (insn);
7438 else if (! reg_set_p (stack_pointer_rtx, insn)
7439 && (info.sp_equiv_reg == stack_pointer_rtx
7440 || !reg_set_p (info.sp_equiv_reg, insn)))
7442 if (! validate_replace_rtx (stack_pointer_rtx,
7443 plus_constant (info.sp_equiv_reg,
7444 info.sp_offset),
7445 insn))
7446 abort ();
7448 add_insn (insn);
7450 else if (GET_CODE (PATTERN (insn)) == SET)
7451 handle_epilogue_set (PATTERN (insn), &info);
7452 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7454 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7455 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7456 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7458 else
7459 add_insn (insn);
7461 info.sp_equiv_reg = info.new_sp_equiv_reg;
7462 info.sp_offset = info.new_sp_offset;
7465 seq = gen_sequence ();
7466 end_sequence ();
7467 return seq;
7470 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
7471 structure that contains information about what we've seen so far. We
7472 process this SET by either updating that data or by emitting one or
7473 more insns. */
7475 static void
7476 handle_epilogue_set (set, p)
7477 rtx set;
7478 struct epi_info *p;
7480 /* First handle the case where we are setting SP. Record what it is being
7481 set from. If unknown, abort. */
7482 if (reg_set_p (stack_pointer_rtx, set))
7484 if (SET_DEST (set) != stack_pointer_rtx)
7485 abort ();
7487 if (GET_CODE (SET_SRC (set)) == PLUS
7488 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
7490 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
7491 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
7493 else
7494 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
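/* For example (illustrative RTL only),
	(set (reg:SI sp) (plus:SI (reg:SI fp) (const_int 16)))
   records FP as NEW_SP_EQUIV_REG with a NEW_SP_OFFSET of 16, while
	(set (reg:SI sp) (reg:SI fp))
   records FP with an offset of zero. */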
7496 /* If we are adjusting SP, we adjust from the old data. */
7497 if (p->new_sp_equiv_reg == stack_pointer_rtx)
7499 p->new_sp_equiv_reg = p->sp_equiv_reg;
7500 p->new_sp_offset += p->sp_offset;
7503 if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
7504 abort ();
7506 return;
7509 /* Next handle the case where we are setting SP's equivalent register.
7510 If we already have a value to set it to, abort. We could update, but
7511 there seems little point in handling that case. Note that we have
7512 to allow for the case where the register was set in an earlier part
7513 of a PARALLEL within this same insn. But use the old offset for any
7514 updates within this insn. */
7515 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
7517 if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
7518 || p->equiv_reg_src != 0)
7519 abort ();
7520 else
7521 p->equiv_reg_src
7522 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7523 plus_constant (p->sp_equiv_reg,
7524 p->sp_offset));
7527 /* Otherwise, replace any references to SP in the insn with its new value
7528 and emit the insn. */
7529 else
7531 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7532 plus_constant (p->sp_equiv_reg,
7533 p->sp_offset));
7534 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
7535 plus_constant (p->sp_equiv_reg,
7536 p->sp_offset));
7537 emit_insn (set);
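/* As a hypothetical instance of this last case: if SP is currently
   equivalent to FP + 16, then
	(set (reg:SI r3) (mem:SI (plus:SI (reg:SI sp) (const_int 8))))
   is rewritten and emitted as
	(set (reg:SI r3) (mem:SI (plus:SI (reg:SI fp) (const_int 24)))). */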
7541 /* Emit an insn to perform the pending load recorded in p->equiv_reg_src, if any. */
7543 static void
7544 emit_equiv_load (p)
7545 struct epi_info *p;
7547 if (p->equiv_reg_src != 0)
7548 emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
7550 p->equiv_reg_src = 0;
7552 #endif
7554 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7555 this into place with notes indicating where the prologue ends and where
7556 the epilogue begins. Update the basic block information when possible. */
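/* Schematically, for a simple function the insn stream afterwards looks
   roughly like this (illustrative layout only):

	<prologue insns>
	NOTE_INSN_PROLOGUE_END
	<function body>
	NOTE_INSN_EPILOGUE_BEG
	<epilogue insns>

   Sibcall epilogues, when the target provides them, are instead emitted
   immediately before each sibling call. */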
7558 void
7559 thread_prologue_and_epilogue_insns (f)
7560 rtx f ATTRIBUTE_UNUSED;
7562 int inserted = 0;
7563 edge e;
7564 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7565 rtx seq;
7566 #endif
7567 #ifdef HAVE_prologue
7568 rtx prologue_end = NULL_RTX;
7569 #endif
7570 #if defined (HAVE_epilogue) || defined(HAVE_return)
7571 rtx epilogue_end = NULL_RTX;
7572 #endif
7574 #ifdef HAVE_prologue
7575 if (HAVE_prologue)
7577 start_sequence ();
7578 seq = gen_prologue ();
7579 emit_insn (seq);
7581 /* Retain a map of the prologue insns. */
7582 if (GET_CODE (seq) != SEQUENCE)
7583 seq = get_insns ();
7584 record_insns (seq, &prologue);
7585 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7587 seq = gen_sequence ();
7588 end_sequence ();
7590 /* We can't deal with multiple successors of the entry block
7591 at the moment, and a function should always have at least one
7592 entry point. */
7593 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7594 abort ();
7596 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7597 inserted = 1;
7599 #endif
7601 /* If the exit block has no non-fake predecessors, we don't need
7602 an epilogue. */
7603 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7604 if ((e->flags & EDGE_FAKE) == 0)
7605 break;
7606 if (e == NULL)
7607 goto epilogue_done;
7609 #ifdef HAVE_return
7610 if (optimize && HAVE_return)
7612 /* If we're allowed to generate a simple return instruction,
7613 then by definition we don't need a full epilogue. Examine
7614 the block that falls through to EXIT. If it does not
7615 contain any code, examine its predecessors and try to
7616 emit (conditional) return instructions. */
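/* For instance, a conditional jump to the return label, such as the
   hypothetical pattern

	(set (pc) (if_then_else (eq (cc0) (const_int 0))
				(label_ref L_ret)
				(pc)))

   can have its LABEL_REF replaced by (return), turning it into a
   conditional return; that is what the loop below attempts via
   validate_change. */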
7618 basic_block last;
7619 edge e_next;
7620 rtx label;
7622 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7623 if (e->flags & EDGE_FALLTHRU)
7624 break;
7625 if (e == NULL)
7626 goto epilogue_done;
7627 last = e->src;
7629 /* Verify that there are no active instructions in the last block. */
7630 label = last->end;
7631 while (label && GET_CODE (label) != CODE_LABEL)
7633 if (active_insn_p (label))
7634 break;
7635 label = PREV_INSN (label);
7638 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7640 rtx epilogue_line_note = NULL_RTX;
7642 /* Locate the line number associated with the closing brace,
7643 if we can find one. */
7644 for (seq = get_last_insn ();
7645 seq && ! active_insn_p (seq);
7646 seq = PREV_INSN (seq))
7647 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7649 epilogue_line_note = seq;
7650 break;
7653 for (e = last->pred; e; e = e_next)
7655 basic_block bb = e->src;
7656 rtx jump;
7658 e_next = e->pred_next;
7659 if (bb == ENTRY_BLOCK_PTR)
7660 continue;
7662 jump = bb->end;
7663 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7664 continue;
7666 /* If we have an unconditional jump, we can replace that
7667 with a simple return instruction. */
7668 if (simplejump_p (jump))
7670 emit_return_into_block (bb, epilogue_line_note);
7671 delete_insn (jump);
7674 /* If we have a conditional jump, we can try to replace
7675 that with a conditional return instruction. */
7676 else if (condjump_p (jump))
7678 rtx ret, *loc;
7680 ret = SET_SRC (PATTERN (jump));
7681 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7682 loc = &XEXP (ret, 1);
7683 else
7684 loc = &XEXP (ret, 2);
7685 ret = gen_rtx_RETURN (VOIDmode);
7687 if (! validate_change (jump, loc, ret, 0))
7688 continue;
7689 if (JUMP_LABEL (jump))
7690 LABEL_NUSES (JUMP_LABEL (jump))--;
7692 /* If this block has only one successor, it both jumps
7693 and falls through to the fallthru block, so we can't
7694 delete the edge. */
7695 if (bb->succ->succ_next == NULL)
7696 continue;
7698 else
7699 continue;
7701 /* Fix up the CFG for the successful change we just made. */
7702 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7705 /* Emit a return insn for the exit fallthru block. Whether
7706 this is still reachable will be determined later. */
7708 emit_barrier_after (last->end);
7709 emit_return_into_block (last, epilogue_line_note);
7710 epilogue_end = last->end;
7711 last->succ->flags &= ~EDGE_FALLTHRU;
7712 goto epilogue_done;
7715 #endif
7716 #ifdef HAVE_epilogue
7717 if (HAVE_epilogue)
7719 /* Find the edge that falls through to EXIT. Other edges may exist
7720 due to RETURN instructions, but those don't need epilogues.
7721 There really shouldn't be a mixture -- either all should have
7722 been converted or none, however... */
7724 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7725 if (e->flags & EDGE_FALLTHRU)
7726 break;
7727 if (e == NULL)
7728 goto epilogue_done;
7730 start_sequence ();
7731 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7733 seq = gen_epilogue ();
7735 #ifdef INCOMING_RETURN_ADDR_RTX
7736 /* If this function returns with the stack depressed and we can support
7737 it, massage the epilogue to actually do that. */
7738 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7739 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7740 seq = keep_stack_depressed (seq);
7741 #endif
7743 emit_jump_insn (seq);
7745 /* Retain a map of the epilogue insns. */
7746 if (GET_CODE (seq) != SEQUENCE)
7747 seq = get_insns ();
7748 record_insns (seq, &epilogue);
7750 seq = gen_sequence ();
7751 end_sequence ();
7753 insert_insn_on_edge (seq, e);
7754 inserted = 1;
7756 #endif
7757 epilogue_done:
7759 if (inserted)
7760 commit_edge_insertions ();
7762 #ifdef HAVE_sibcall_epilogue
7763 /* Emit sibling epilogues before any sibling call sites. */
7764 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7766 basic_block bb = e->src;
7767 rtx insn = bb->end;
7768 rtx i;
7769 rtx newinsn;
7771 if (GET_CODE (insn) != CALL_INSN
7772 || ! SIBLING_CALL_P (insn))
7773 continue;
7775 start_sequence ();
7776 seq = gen_sibcall_epilogue ();
7777 end_sequence ();
7779 i = PREV_INSN (insn);
7780 newinsn = emit_insn_before (seq, insn);
7782 /* Retain a map of the epilogue insns. Used in life analysis to
7783 avoid getting rid of sibcall epilogue insns. */
7784 record_insns (GET_CODE (seq) == SEQUENCE
7785 ? seq : newinsn, &sibcall_epilogue);
7787 #endif
7789 #ifdef HAVE_prologue
7790 if (prologue_end)
7792 rtx insn, prev;
7794 /* GDB handles `break f' by setting a breakpoint on the first
7795 line note after the prologue. Which means (1) that if
7796 there are line number notes before where we inserted the
7797 prologue we should move them, and (2) we should generate a
7798 note before the end of the first basic block, if there isn't
7799 one already there.
7801 ??? This behaviour is completely broken when dealing with
7802 multiple-entry functions. We simply always place the note
7803 in the first basic block and let alternate entry points
7804 be missed.
7807 for (insn = prologue_end; insn; insn = prev)
7809 prev = PREV_INSN (insn);
7810 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7812 /* Note that we cannot reorder the first insn in the
7813 chain, since rest_of_compilation relies on that
7814 remaining constant. */
7815 if (prev == NULL)
7816 break;
7817 reorder_insns (insn, insn, prologue_end);
7821 /* Find the last line number note in the first block. */
7822 for (insn = BASIC_BLOCK (0)->end;
7823 insn != prologue_end && insn;
7824 insn = PREV_INSN (insn))
7825 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7826 break;
7828 /* If we didn't find one, make a copy of the first line number
7829 note we run across. */
7830 if (! insn)
7832 for (insn = next_active_insn (prologue_end);
7833 insn;
7834 insn = PREV_INSN (insn))
7835 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7837 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7838 NOTE_LINE_NUMBER (insn),
7839 prologue_end);
7840 break;
7844 #endif
7845 #ifdef HAVE_epilogue
7846 if (epilogue_end)
7848 rtx insn, next;
7850 /* Similarly, move any line notes that appear after the epilogue.
7851 There is no need, however, to be quite so anal about the existence
7852 of such a note. */
7853 for (insn = epilogue_end; insn; insn = next)
7855 next = NEXT_INSN (insn);
7856 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7857 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7860 #endif
7863 /* Reposition the prologue-end and epilogue-begin notes after instruction
7864 scheduling and delayed branch scheduling. */
7866 void
7867 reposition_prologue_and_epilogue_notes (f)
7868 rtx f ATTRIBUTE_UNUSED;
7870 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7871 rtx insn, last, note;
7872 int len;
7874 if ((len = VARRAY_SIZE (prologue)) > 0)
7876 last = 0, note = 0;
7878 /* Scan from the beginning until we reach the last prologue insn.
7879 We apparently can't depend on basic_block_{head,end} after
7880 reorg has run. */
7881 for (insn = f; insn; insn = NEXT_INSN (insn))
7883 if (GET_CODE (insn) == NOTE)
7885 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7886 note = insn;
7888 else if (contains (insn, prologue))
7890 last = insn;
7891 if (--len == 0)
7892 break;
7896 if (last)
7898 rtx next;
7900 /* Find the prologue-end note if we haven't already, and
7901 move it to just after the last prologue insn. */
7902 if (note == 0)
7904 for (note = last; (note = NEXT_INSN (note));)
7905 if (GET_CODE (note) == NOTE
7906 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7907 break;
7910 next = NEXT_INSN (note);
7912 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
7913 if (GET_CODE (last) == CODE_LABEL)
7914 last = NEXT_INSN (last);
7915 reorder_insns (note, note, last);
7919 if ((len = VARRAY_SIZE (epilogue)) > 0)
7921 last = 0, note = 0;
7923 /* Scan from the end until we reach the first epilogue insn.
7924 We apparently can't depend on basic_block_{head,end} after
7925 reorg has run. */
7926 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7928 if (GET_CODE (insn) == NOTE)
7930 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7931 note = insn;
7933 else if (contains (insn, epilogue))
7935 last = insn;
7936 if (--len == 0)
7937 break;
7941 if (last)
7943 /* Find the epilogue-begin note if we haven't already, and
7944 move it to just before the first epilogue insn. */
7945 if (note == 0)
7947 for (note = insn; (note = PREV_INSN (note));)
7948 if (GET_CODE (note) == NOTE
7949 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7950 break;
7953 if (PREV_INSN (last) != note)
7954 reorder_insns (note, note, PREV_INSN (last));
7957 #endif /* HAVE_prologue or HAVE_epilogue */
7960 /* Mark P for GC. */
7962 static void
7963 mark_function_status (p)
7964 struct function *p;
7966 struct var_refs_queue *q;
7967 struct temp_slot *t;
7968 int i;
7969 rtx *r;
7971 if (p == 0)
7972 return;
7974 ggc_mark_rtx (p->arg_offset_rtx);
7976 if (p->x_parm_reg_stack_loc)
7977 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7978 i > 0; --i, ++r)
7979 ggc_mark_rtx (*r);
7981 ggc_mark_rtx (p->return_rtx);
7982 ggc_mark_rtx (p->x_cleanup_label);
7983 ggc_mark_rtx (p->x_return_label);
7984 ggc_mark_rtx (p->x_save_expr_regs);
7985 ggc_mark_rtx (p->x_stack_slot_list);
7986 ggc_mark_rtx (p->x_parm_birth_insn);
7987 ggc_mark_rtx (p->x_tail_recursion_label);
7988 ggc_mark_rtx (p->x_tail_recursion_reentry);
7989 ggc_mark_rtx (p->internal_arg_pointer);
7990 ggc_mark_rtx (p->x_arg_pointer_save_area);
7991 ggc_mark_tree (p->x_rtl_expr_chain);
7992 ggc_mark_rtx (p->x_last_parm_insn);
7993 ggc_mark_tree (p->x_context_display);
7994 ggc_mark_tree (p->x_trampoline_list);
7995 ggc_mark_rtx (p->epilogue_delay_list);
7996 ggc_mark_rtx (p->x_clobber_return_insn);
7998 for (t = p->x_temp_slots; t != 0; t = t->next)
8000 ggc_mark (t);
8001 ggc_mark_rtx (t->slot);
8002 ggc_mark_rtx (t->address);
8003 ggc_mark_tree (t->rtl_expr);
8004 ggc_mark_tree (t->type);
8007 for (q = p->fixup_var_refs_queue; q != 0; q = q->next)
8009 ggc_mark (q);
8010 ggc_mark_rtx (q->modified);
8013 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
8014 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
8015 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
8016 ggc_mark_tree (p->x_nonlocal_labels);
8018 mark_hard_reg_initial_vals (p);
8021 /* Mark the struct function pointed to by *ARG for GC, if it is not
8022 NULL. This is used to mark the current function and the outer
8023 function chain. */
8025 static void
8026 maybe_mark_struct_function (arg)
8027 void *arg;
8029 struct function *f = *(struct function **) arg;
8031 if (f == 0)
8032 return;
8034 ggc_mark_struct_function (f);
8037 /* Mark a struct function * for GC. This is called from ggc-common.c. */
8039 void
8040 ggc_mark_struct_function (f)
8041 struct function *f;
8043 ggc_mark (f);
8044 ggc_mark_tree (f->decl);
8046 mark_function_status (f);
8047 mark_eh_status (f->eh);
8048 mark_stmt_status (f->stmt);
8049 mark_expr_status (f->expr);
8050 mark_emit_status (f->emit);
8051 mark_varasm_status (f->varasm);
8053 if (mark_machine_status)
8054 (*mark_machine_status) (f);
8055 if (mark_lang_status)
8056 (*mark_lang_status) (f);
8058 if (f->original_arg_vector)
8059 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
8060 if (f->original_decl_initial)
8061 ggc_mark_tree (f->original_decl_initial);
8062 if (f->outer)
8063 ggc_mark_struct_function (f->outer);
8066 /* Called once, at initialization, to initialize function.c. */
8068 void
8069 init_function_once ()
8071 ggc_add_root (&cfun, 1, sizeof cfun, maybe_mark_struct_function);
8072 ggc_add_root (&outer_function_chain, 1, sizeof outer_function_chain,
8073 maybe_mark_struct_function);
8075 VARRAY_INT_INIT (prologue, 0, "prologue");
8076 VARRAY_INT_INIT (epilogue, 0, "epilogue");
8077 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");