/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the nearest multiple of the required alignment.
   Avoid using division in case the value is negative.  Assume the
   alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the nearest multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

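/* For example, with a power-of-two alignment of 8, FLOOR_ROUND (13, 8) == 8
   and CEIL_ROUND (13, 8) == 16, while FLOOR_ROUND (-13, 8) == -16 and
   CEIL_ROUND (-13, 8) == -8; the masking form gives these well-defined
   results even for negative values, which division would not.  */
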
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */

int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) PARAMS ((void));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
                                         int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
                                        enum machine_mode, enum machine_mode,
                                        int, unsigned int, int,
                                        htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
                                             enum machine_mode,
                                             htab_t));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
                                    htab_t));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
                                          int, int, rtx));
static void fixup_var_refs_insns_with_hash
  PARAMS ((htab_t, rtx,
           enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
                                         int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
                                      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
                                             int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
                                          struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
                               tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
                                       htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static hashval_t insns_for_mem_hash PARAMS ((const void *));
static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));

/* Pointer to chain of `struct function' for containing functions.  */
static GTY(()) struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  (*lang_hooks.function.enter_nested) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  (*lang_hooks.function.leave_nested) (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
                    p->fixup_var_refs_queue->promoted_mode,
                    p->fixup_var_refs_queue->unsignedp,
                    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        fixup_var_refs (queue->modified, queue->promoted_mode,
                        queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  (*lang_hooks.function.final) (f);
  f->stmt = NULL;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = (*lang_hooks.types.type_for_mode) (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}

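/* As an illustration of the ALIGN conventions documented above,

     assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0)

   allocates a slot whose alignment is taken from SImode, whereas an
   ALIGN of -1 would align to BIGGEST_ALIGNMENT and round the size up
   to a multiple of it.  */
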
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = TYPE_READONLY (type);
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

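/* As an illustration of the KEEP conventions documented above,

     assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0)

   returns a slot that the next call to free_temp_slots may reclaim,
   whereas KEEP == 1 retains the slot until its nesting level is popped,
   and KEEP == 2 ties its lifetime to target_temp_slot_level.  */
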
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error_with_decl (decl, "size of variable `%s' is too large");
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}

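/* For example, if a free BLKmode slot P covers bytes [0, 16) of the frame
   (P->base_offset == 0, P->full_size == 16) and another free BLKmode slot
   Q covers [16, 24), the loop above merges Q into P, leaving a single slot
   that a later 24-byte request can reuse.  */
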
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

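/* For example, an address of the form
   (plus virtual_stack_vars_rtx (const_int 12)) matches a slot whose
   base_offset is 8 and full_size is 8, since offset 12 falls within the
   half-open range [8, 16) tested above.  */
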
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

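/* A typical use of the nesting primitives, sketched from the descriptions
   above:

     push_temp_slots ();
     temp = assign_stack_temp (mode, size, 0);
     ... emit code that uses TEMP ...
     preserve_temp_slots (result);
     pop_temp_slots ();

   free_temp_slots would drop every non-kept slot at the current level,
   while preserve_temp_slots moves the slot holding RESULT up one level
   so that it survives the pop.  */
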
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
               || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
         ? SAVE_EXPR_RTL (decl)
         : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
                            decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fixup references to the parts only after we fixup references
         to the whole CONCAT, lest we do double fixups for the latter
         references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
         already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     htab_t ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}

/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     htab_t ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}

static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     htab_t ht;
     rtx may_share;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
                                      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0, may_share);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                may_share);
          end_sequence ();
        }
    }
}

/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries
   and X is some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx may_share;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the Normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                        may_share);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }

      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
                             may_share);

      insn = next;
    }
}

/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
     htab_t ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
{
  struct insns_for_mem_entry tmp;
  struct insns_for_mem_entry *ime;
  rtx insn_list;

  tmp.key = var;
  ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
                           unsignedp, 1, may_share);
}

/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */

static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx no_share;
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
          || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
              && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
                  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
        /* The REG_LIBCALL note will go away since we are going to
           turn INSN into a NOTE, so just delete the
           corresponding REG_RETVAL note.  */
        remove_note (XEXP (note, 0),
                     find_reg_note (XEXP (note, 0), REG_RETVAL,
                                    NULL_RTX));

      delete_insn (insn);
    }

  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
           && (set = single_set (insn)) != 0
           && SET_DEST (set) == var
           /* If this represents the result of an insn group,
              don't delete the insn.  */
           && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
           && (rtx_equal_p (SET_SRC (set), var)
               || (GET_CODE (SET_SRC (set)) == REG
                   && (prev = prev_nonnote_insn (insn)) != 0
                   && (prev_set = single_set (prev)) != 0
                   && SET_DEST (prev_set) == SET_SRC (set)
                   && rtx_equal_p (SET_SRC (prev_set), var))))
    {
      delete_insn (insn);
    }
  else
    {
      struct fixup_replacement *replacements = 0;
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
        {
          /* If the insn that copies the results of a CALL_INSN
             into a pseudo now references VAR, we have to use an
             intermediate pseudo since we want the life of the
             return value register to be only a single insn.

             If we don't use an intermediate pseudo, such things as
             address computations to make the address of VAR valid
             if it is not can be placed between the CALL_INSN and INSN.

             To make sure this doesn't happen, we record the destination
             of the CALL_INSN and see if the next insn uses both that
             and VAR.  */

          if (call_dest != 0 && GET_CODE (insn) == INSN
              && reg_mentioned_p (var, PATTERN (insn))
              && reg_mentioned_p (call_dest, PATTERN (insn)))
            {
              rtx temp = gen_reg_rtx (GET_MODE (call_dest));

              emit_insn_before (gen_move_insn (temp, call_dest), insn);

              PATTERN (insn) = replace_rtx (PATTERN (insn),
                                            call_dest, temp);
            }

          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == SET)
            call_dest = SET_DEST (PATTERN (insn));
          else if (GET_CODE (insn) == CALL_INSN
                   && GET_CODE (PATTERN (insn)) == PARALLEL
                   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
            call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
          else
            call_dest = 0;
        }

      /* See if we have to do anything to INSN now that VAR is in
         memory.  If it needs to be loaded into a pseudo, use a single
         pseudo for the entire insn in case there is a MATCH_DUP
         between two operands.  We pass a pointer to the head of
         a list of struct fixup_replacements.  If fixup_var_refs_1
         needs to allocate pseudos or replacement MEMs (for SUBREGs),
         it will record them in this list.

         If it allocated a pseudo for any replacement, we copy into
         it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                        &replacements, no_share);

      /* If this is last_parm_insn, and any instructions were output
         after it to fix it up, then we must set last_parm_insn to
         the last such instruction emitted.  */
      if (insn == last_parm_insn)
        last_parm_insn = PREV_INSN (next_insn);

      while (replacements)
        {
          struct fixup_replacement *next;

          if (GET_CODE (replacements->new) == REG)
            {
              rtx insert_before;
              rtx seq;

              /* OLD might be a (subreg (mem)).  */
              if (GET_CODE (replacements->old) == SUBREG)
                replacements->old
                  = fixup_memory_subreg (replacements->old, insn,
                                         promoted_mode, 0);
              else
                replacements->old
                  = fixup_stack_1 (replacements->old, insn);

              insert_before = insn;

              /* If we are changing the mode, do a conversion.
                 This might be wasteful, but combine.c will
                 eliminate much of the waste.  */

              if (GET_MODE (replacements->new)
                  != GET_MODE (replacements->old))
                {
                  start_sequence ();
                  convert_move (replacements->new,
                                replacements->old, unsignedp);
                  seq = get_insns ();
                  end_sequence ();
                }
              else
                seq = gen_move_insn (replacements->new,
                                     replacements->old);

              emit_insn_before (seq, insert_before);
            }

          next = replacements->next;
          free (replacements);
          replacements = next;
        }
    }

  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
     But don't touch other insns referred to by reg-notes;
     we will get them elsewhere.  */
  while (note)
    {
      if (GET_CODE (note) != INSN_LIST)
        XEXP (note, 0)
          = walk_fixup_memory_subreg (XEXP (note, 0), insn,
                                      promoted_mode, 1);
      note = XEXP (note, 1);
    }
}

1896 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1897 See if the rtx expression at *LOC in INSN needs to be changed.
1899 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1900 contain a list of original rtx's and replacements. If we find that we need
1901 to modify this insn by replacing a memory reference with a pseudo or by
1902 making a new MEM to implement a SUBREG, we consult that list to see if
1903 we have already chosen a replacement. If none has already been allocated,
1904 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1905 or the SUBREG, as appropriate, to the pseudo. */
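/* For example (illustrative): if VAR is the MEM that replaced
(reg:SI 100) and INSN is (set (reg:SI 101) (mult:SI VAR VAR)), both
occurrences of VAR must be rewritten with the same new pseudo, in
case the insn pattern contains a MATCH_DUP; keying the REPLACEMENTS
list on VAR guarantees this, and fixup_var_refs_insn then emits a
single copy of VAR into that pseudo.  */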
1907 static void
1908 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1909 rtx var;
1910 enum machine_mode promoted_mode;
1911 rtx *loc;
1912 rtx insn;
1913 struct fixup_replacement **replacements;
1914 rtx no_share;
1916 int i;
1917 rtx x = *loc;
1918 RTX_CODE code = GET_CODE (x);
1919 const char *fmt;
1920 rtx tem, tem1;
1921 struct fixup_replacement *replacement;
1923 switch (code)
1925 case ADDRESSOF:
1926 if (XEXP (x, 0) == var)
1928 /* Prevent sharing of rtl that might lose. */
1929 rtx sub = copy_rtx (XEXP (var, 0));
1931 if (! validate_change (insn, loc, sub, 0))
1933 rtx y = gen_reg_rtx (GET_MODE (sub));
1934 rtx seq, new_insn;
1936 /* We should be able to replace it with a register, or all is lost.
1937 Note that we can't use validate_change to verify this, since
1938 it does not take care of replacing all duplicates simultaneously. */
1939 if (! validate_replace_rtx (*loc, y, insn))
1940 abort ();
1942 /* Careful! First try to recognize a direct move of the
1943 value, mimicking how things are done in gen_reload wrt
1944 PLUS. Consider what happens when insn is a conditional
1945 move instruction and addsi3 clobbers flags. */
1947 start_sequence ();
1948 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1949 seq = get_insns ();
1950 end_sequence ();
1952 if (recog_memoized (new_insn) < 0)
1954 /* That failed. Fall back on force_operand and hope. */
1956 start_sequence ();
1957 sub = force_operand (sub, y);
1958 if (sub != y)
1959 emit_insn (gen_move_insn (y, sub));
1960 seq = get_insns ();
1961 end_sequence ();
1964 #ifdef HAVE_cc0
1965 /* Don't separate setter from user. */
1966 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1967 insn = PREV_INSN (insn);
1968 #endif
1970 emit_insn_before (seq, insn);
1973 return;
1975 case MEM:
1976 if (var == x)
1978 /* If we already have a replacement, use it. Otherwise,
1979 try to fix up this address in case it is invalid. */
1981 replacement = find_fixup_replacement (replacements, var);
1982 if (replacement->new)
1984 *loc = replacement->new;
1985 return;
1988 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1990 /* Unless we are forcing memory to register or we changed the mode,
1991 we can leave things the way they are if the insn is valid. */
1993 INSN_CODE (insn) = -1;
1994 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1995 && recog_memoized (insn) >= 0)
1996 return;
1998 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1999 return;
2002 /* If X contains VAR, we need to unshare it here so that we update
2003 each occurrence separately. But all identical MEMs in one insn
2004 must be replaced with the same rtx because of the possibility of
2005 MATCH_DUPs. */
2007 if (reg_mentioned_p (var, x))
2009 replacement = find_fixup_replacement (replacements, x);
2010 if (replacement->new == 0)
2011 replacement->new = copy_most_rtx (x, no_share);
2013 *loc = x = replacement->new;
2014 code = GET_CODE (x);
2016 break;
2018 case REG:
2019 case CC0:
2020 case PC:
2021 case CONST_INT:
2022 case CONST:
2023 case SYMBOL_REF:
2024 case LABEL_REF:
2025 case CONST_DOUBLE:
2026 case CONST_VECTOR:
2027 return;
2029 case SIGN_EXTRACT:
2030 case ZERO_EXTRACT:
2031 /* Note that in some cases those types of expressions are altered
2032 by optimize_bit_field, and do not survive to get here. */
2033 if (XEXP (x, 0) == var
2034 || (GET_CODE (XEXP (x, 0)) == SUBREG
2035 && SUBREG_REG (XEXP (x, 0)) == var))
2037 /* Get TEM as a valid MEM in the mode presently in the insn.
2039 We don't worry about the possibility of MATCH_DUP here; it
2040 is highly unlikely and would be tricky to handle. */
2042 tem = XEXP (x, 0);
2043 if (GET_CODE (tem) == SUBREG)
2045 if (GET_MODE_BITSIZE (GET_MODE (tem))
2046 > GET_MODE_BITSIZE (GET_MODE (var)))
2048 replacement = find_fixup_replacement (replacements, var);
2049 if (replacement->new == 0)
2050 replacement->new = gen_reg_rtx (GET_MODE (var));
2051 SUBREG_REG (tem) = replacement->new;
2053 /* The following code works only if we have a MEM, so we
2054 need to handle the subreg here. We directly substitute
2055 it assuming that a subreg must be OK here. We already
2056 scheduled a replacement to copy the mem into the
2057 subreg. */
2058 XEXP (x, 0) = tem;
2059 return;
2061 else
2062 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2064 else
2065 tem = fixup_stack_1 (tem, insn);
2067 /* Unless we want to load from memory, get TEM into the proper mode
2068 for an extract from memory. This can only be done if the
2069 extract is at a constant position and length. */
2071 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2072 && GET_CODE (XEXP (x, 2)) == CONST_INT
2073 && ! mode_dependent_address_p (XEXP (tem, 0))
2074 && ! MEM_VOLATILE_P (tem))
2076 enum machine_mode wanted_mode = VOIDmode;
2077 enum machine_mode is_mode = GET_MODE (tem);
2078 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2080 if (GET_CODE (x) == ZERO_EXTRACT)
2082 enum machine_mode new_mode
2083 = mode_for_extraction (EP_extzv, 1);
2084 if (new_mode != MAX_MACHINE_MODE)
2085 wanted_mode = new_mode;
2087 else if (GET_CODE (x) == SIGN_EXTRACT)
2089 enum machine_mode new_mode
2090 = mode_for_extraction (EP_extv, 1);
2091 if (new_mode != MAX_MACHINE_MODE)
2092 wanted_mode = new_mode;
2095 /* If we have a narrower mode, we can do something. */
2096 if (wanted_mode != VOIDmode
2097 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2099 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2100 rtx old_pos = XEXP (x, 2);
2101 rtx newmem;
2103 /* If the bytes and bits are counted differently, we
2104 must adjust the offset. */
2105 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2106 offset = (GET_MODE_SIZE (is_mode)
2107 - GET_MODE_SIZE (wanted_mode) - offset);
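/* Worked example (illustrative numbers): with IS_MODE == SImode
(4 bytes), WANTED_MODE == QImode (1 byte) and POS == 8, OFFSET
starts as 8 / 8 == 1; when BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN it
becomes 4 - 1 - 1 == 2, and POS is then reduced below to
8 % 8 == 0.  */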
2109 pos %= GET_MODE_BITSIZE (wanted_mode);
2111 newmem = adjust_address_nv (tem, wanted_mode, offset);
2113 /* Make the change and see if the insn remains valid. */
2114 INSN_CODE (insn) = -1;
2115 XEXP (x, 0) = newmem;
2116 XEXP (x, 2) = GEN_INT (pos);
2118 if (recog_memoized (insn) >= 0)
2119 return;
2121 /* Otherwise, restore old position. XEXP (x, 0) will be
2122 restored later. */
2123 XEXP (x, 2) = old_pos;
2127 /* If we get here, the bitfield extract insn can't accept a memory
2128 reference. Copy the input into a register. */
2130 tem1 = gen_reg_rtx (GET_MODE (tem));
2131 emit_insn_before (gen_move_insn (tem1, tem), insn);
2132 XEXP (x, 0) = tem1;
2133 return;
2135 break;
2137 case SUBREG:
2138 if (SUBREG_REG (x) == var)
2140 /* If this is a special SUBREG made because VAR was promoted
2141 from a wider mode, replace it with VAR and call ourselves
2142 recursively, this time saying that the object previously
2143 had its current mode (by virtue of the SUBREG). */
2145 if (SUBREG_PROMOTED_VAR_P (x))
2147 *loc = var;
2148 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2149 no_share);
2150 return;
2153 /* If this SUBREG makes VAR wider, it has become a paradoxical
2154 SUBREG with VAR in memory, but these aren't allowed at this
2155 stage of the compilation. So load VAR into a pseudo and take
2156 a SUBREG of that pseudo. */
2157 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2159 replacement = find_fixup_replacement (replacements, var);
2160 if (replacement->new == 0)
2161 replacement->new = gen_reg_rtx (promoted_mode);
2162 SUBREG_REG (x) = replacement->new;
2163 return;
2166 /* See if we have already found a replacement for this SUBREG.
2167 If so, use it. Otherwise, make a MEM and see if the insn
2168 is recognized. If not, or if we should force MEM into a register,
2169 make a pseudo for this SUBREG. */
2170 replacement = find_fixup_replacement (replacements, x);
2171 if (replacement->new)
2173 *loc = replacement->new;
2174 return;
2177 replacement->new = *loc = fixup_memory_subreg (x, insn,
2178 promoted_mode, 0);
2180 INSN_CODE (insn) = -1;
2181 if (! flag_force_mem && recog_memoized (insn) >= 0)
2182 return;
2184 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2185 return;
2187 break;
2189 case SET:
2190 /* First do special simplification of bit-field references. */
2191 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2192 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2193 optimize_bit_field (x, insn, 0);
2194 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2195 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2196 optimize_bit_field (x, insn, 0);
2198 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2199 into a register and then store it back out. */
2200 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2201 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2202 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2203 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2204 > GET_MODE_SIZE (GET_MODE (var))))
2206 replacement = find_fixup_replacement (replacements, var);
2207 if (replacement->new == 0)
2208 replacement->new = gen_reg_rtx (GET_MODE (var));
2210 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2211 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2214 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2215 insn into a pseudo and store the low part of the pseudo into VAR. */
2216 if (GET_CODE (SET_DEST (x)) == SUBREG
2217 && SUBREG_REG (SET_DEST (x)) == var
2218 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2219 > GET_MODE_SIZE (GET_MODE (var))))
2221 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2222 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2223 tem)),
2224 insn);
2225 break;
2229 rtx dest = SET_DEST (x);
2230 rtx src = SET_SRC (x);
2231 rtx outerdest = dest;
2233 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2234 || GET_CODE (dest) == SIGN_EXTRACT
2235 || GET_CODE (dest) == ZERO_EXTRACT)
2236 dest = XEXP (dest, 0);
2238 if (GET_CODE (src) == SUBREG)
2239 src = SUBREG_REG (src);
2241 /* If VAR does not appear at the top level of the SET
2242 just scan the lower levels of the tree. */
2244 if (src != var && dest != var)
2245 break;
2247 /* We will need to rerecognize this insn. */
2248 INSN_CODE (insn) = -1;
2250 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2251 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2253 /* Since this case will return, ensure we fixup all the
2254 operands here. */
2255 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2256 insn, replacements, no_share);
2257 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2258 insn, replacements, no_share);
2259 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2260 insn, replacements, no_share);
2262 tem = XEXP (outerdest, 0);
2264 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2265 that may appear inside a ZERO_EXTRACT.
2266 This was legitimate when the MEM was a REG. */
2267 if (GET_CODE (tem) == SUBREG
2268 && SUBREG_REG (tem) == var)
2269 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2270 else
2271 tem = fixup_stack_1 (tem, insn);
2273 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2274 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2275 && ! mode_dependent_address_p (XEXP (tem, 0))
2276 && ! MEM_VOLATILE_P (tem))
2278 enum machine_mode wanted_mode;
2279 enum machine_mode is_mode = GET_MODE (tem);
2280 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2282 wanted_mode = mode_for_extraction (EP_insv, 0);
2284 /* If we have a narrower mode, we can do something. */
2285 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2287 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2288 rtx old_pos = XEXP (outerdest, 2);
2289 rtx newmem;
2291 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2292 offset = (GET_MODE_SIZE (is_mode)
2293 - GET_MODE_SIZE (wanted_mode) - offset);
2295 pos %= GET_MODE_BITSIZE (wanted_mode);
2297 newmem = adjust_address_nv (tem, wanted_mode, offset);
2299 /* Make the change and see if the insn remains valid. */
2300 INSN_CODE (insn) = -1;
2301 XEXP (outerdest, 0) = newmem;
2302 XEXP (outerdest, 2) = GEN_INT (pos);
2304 if (recog_memoized (insn) >= 0)
2305 return;
2307 /* Otherwise, restore old position. XEXP (x, 0) will be
2308 restored later. */
2309 XEXP (outerdest, 2) = old_pos;
2313 /* If we get here, the bit-field store doesn't allow memory
2314 or isn't located at a constant position. Load the value into
2315 a register, do the store, and put it back into memory. */
2317 tem1 = gen_reg_rtx (GET_MODE (tem));
2318 emit_insn_before (gen_move_insn (tem1, tem), insn);
2319 emit_insn_after (gen_move_insn (tem, tem1), insn);
2320 XEXP (outerdest, 0) = tem1;
2321 return;
2324 /* STRICT_LOW_PART is a no-op on memory references
2325 and it can cause combinations to be unrecognizable,
2326 so eliminate it. */
2328 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2329 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2331 /* A valid insn to copy VAR into or out of a register
2332 must be left alone, to avoid an infinite loop here.
2333 If the reference to VAR is by a subreg, fix that up,
2334 since SUBREG is not valid for a memref.
2335 Also fix up the address of the stack slot.
2337 Note that we must not try to recognize the insn until
2338 after we know that we have valid addresses and no
2339 (subreg (mem ...) ...) constructs, since these interfere
2340 with determining the validity of the insn. */
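/* For instance, (set (reg:SI 101) VAR), where VAR is now a MEM of
the right mode, is exactly such a copy: if it is recognizable as-is
we return without touching it; otherwise we would allocate a fresh
pseudo for VAR and loop forever.  */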
2342 if ((SET_SRC (x) == var
2343 || (GET_CODE (SET_SRC (x)) == SUBREG
2344 && SUBREG_REG (SET_SRC (x)) == var))
2345 && (GET_CODE (SET_DEST (x)) == REG
2346 || (GET_CODE (SET_DEST (x)) == SUBREG
2347 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2348 && GET_MODE (var) == promoted_mode
2349 && x == single_set (insn))
2351 rtx pat, last;
2353 if (GET_CODE (SET_SRC (x)) == SUBREG
2354 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2355 > GET_MODE_SIZE (GET_MODE (var))))
2357 /* This (subreg VAR) is now a paradoxical subreg. We need
2358 to replace VAR instead of the subreg. */
2359 replacement = find_fixup_replacement (replacements, var);
2360 if (replacement->new == NULL_RTX)
2361 replacement->new = gen_reg_rtx (GET_MODE (var));
2362 SUBREG_REG (SET_SRC (x)) = replacement->new;
2364 else
2366 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2367 if (replacement->new)
2368 SET_SRC (x) = replacement->new;
2369 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2370 SET_SRC (x) = replacement->new
2371 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2372 0);
2373 else
2374 SET_SRC (x) = replacement->new
2375 = fixup_stack_1 (SET_SRC (x), insn);
2378 if (recog_memoized (insn) >= 0)
2379 return;
2381 /* INSN is not valid, but we know that we want to
2382 copy SET_SRC (x) to SET_DEST (x) in some way. So
2383 we generate the move and see whether it requires more
2384 than one insn. If it does, we emit those insns and
2385 delete INSN. Otherwise, we can just replace the pattern
2386 of INSN; we have already verified above that INSN has
2387 no other function than to do X. */
2389 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2390 if (NEXT_INSN (pat) != NULL_RTX)
2392 last = emit_insn_before (pat, insn);
2394 /* INSN might have REG_RETVAL or other important notes, so
2395 we need to store the pattern of the last insn in the
2396 sequence into INSN similarly to the normal case. LAST
2397 should not have REG_NOTES, but we allow them if INSN has
2398 no REG_NOTES. */
2399 if (REG_NOTES (last) && REG_NOTES (insn))
2400 abort ();
2401 if (REG_NOTES (last))
2402 REG_NOTES (insn) = REG_NOTES (last);
2403 PATTERN (insn) = PATTERN (last);
2405 delete_insn (last);
2407 else
2408 PATTERN (insn) = PATTERN (pat);
2410 return;
2413 if ((SET_DEST (x) == var
2414 || (GET_CODE (SET_DEST (x)) == SUBREG
2415 && SUBREG_REG (SET_DEST (x)) == var))
2416 && (GET_CODE (SET_SRC (x)) == REG
2417 || (GET_CODE (SET_SRC (x)) == SUBREG
2418 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2419 && GET_MODE (var) == promoted_mode
2420 && x == single_set (insn))
2422 rtx pat, last;
2424 if (GET_CODE (SET_DEST (x)) == SUBREG)
2425 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2426 promoted_mode, 0);
2427 else
2428 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2430 if (recog_memoized (insn) >= 0)
2431 return;
2433 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2434 if (NEXT_INSN (pat) != NULL_RTX)
2436 last = emit_insn_before (pat, insn);
2438 /* INSN might have REG_RETVAL or other important notes, so
2439 we need to store the pattern of the last insn in the
2440 sequence into INSN similarly to the normal case. LAST
2441 should not have REG_NOTES, but we allow them if INSN has
2442 no REG_NOTES. */
2443 if (REG_NOTES (last) && REG_NOTES (insn))
2444 abort ();
2445 if (REG_NOTES (last))
2446 REG_NOTES (insn) = REG_NOTES (last);
2447 PATTERN (insn) = PATTERN (last);
2449 delete_insn (last);
2451 else
2452 PATTERN (insn) = PATTERN (pat);
2454 return;
2457 /* Otherwise, storing into VAR must be handled specially
2458 by storing into a temporary and copying that into VAR
2459 with a new insn after this one. Note that this case
2460 will be used when storing into a promoted scalar since
2461 the insn will now have different modes on the input
2462 and output and hence will be invalid (except for the case
2463 of setting it to a constant, which does not need any
2464 change if it is valid). We generate extra code in that case,
2465 but combine.c will eliminate it. */
2467 if (dest == var)
2469 rtx temp;
2470 rtx fixeddest = SET_DEST (x);
2471 enum machine_mode temp_mode;
2473 /* A STRICT_LOW_PART around a MEM can be discarded. */
2474 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2475 fixeddest = XEXP (fixeddest, 0);
2476 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2477 if (GET_CODE (fixeddest) == SUBREG)
2479 fixeddest = fixup_memory_subreg (fixeddest, insn,
2480 promoted_mode, 0);
2481 temp_mode = GET_MODE (fixeddest);
2483 else
2485 fixeddest = fixup_stack_1 (fixeddest, insn);
2486 temp_mode = promoted_mode;
2489 temp = gen_reg_rtx (temp_mode);
2491 emit_insn_after (gen_move_insn (fixeddest,
2492 gen_lowpart (GET_MODE (fixeddest),
2493 temp)),
2494 insn);
2496 SET_DEST (x) = temp;
2500 default:
2501 break;
2504 /* Nothing special about this RTX; fix its operands. */
2506 fmt = GET_RTX_FORMAT (code);
2507 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2509 if (fmt[i] == 'e')
2510 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2511 no_share);
2512 else if (fmt[i] == 'E')
2514 int j;
2515 for (j = 0; j < XVECLEN (x, i); j++)
2516 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2517 insn, replacements, no_share);
2522 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2523 The REG was placed on the stack, so X now has the form (SUBREG:m1
2524 (MEM:m2 ...)).
2526 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2527 must be emitted to compute NEWADDR, put them before INSN.
2529 UNCRITICAL nonzero means accept paradoxical subregs.
2530 This is used for subregs found inside REG_NOTES. */
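/* For example (illustrative, little-endian): given
(subreg:SI (mem:DI (reg A)) 4) with PROMOTED_MODE == DImode, this
returns (mem:SI (plus (reg A) (const_int 4))), emitting any insns
needed to form the new address before INSN.  */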
2532 static rtx
2533 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2534 rtx x;
2535 rtx insn;
2536 enum machine_mode promoted_mode;
2537 int uncritical;
2539 int offset;
2540 rtx mem = SUBREG_REG (x);
2541 rtx addr = XEXP (mem, 0);
2542 enum machine_mode mode = GET_MODE (x);
2543 rtx result, seq;
2545 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2546 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2547 abort ();
2549 offset = SUBREG_BYTE (x);
2550 if (BYTES_BIG_ENDIAN)
2551 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2552 the offset so that it points to the right location within the
2553 MEM. */
2554 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2556 if (!flag_force_addr
2557 && memory_address_p (mode, plus_constant (addr, offset)))
2558 /* Shortcut if no insns need be emitted. */
2559 return adjust_address (mem, mode, offset);
2561 start_sequence ();
2562 result = adjust_address (mem, mode, offset);
2563 seq = get_insns ();
2564 end_sequence ();
2566 emit_insn_before (seq, insn);
2567 return result;
2570 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2571 Replace subexpressions of X in place.
2572 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2573 Otherwise return X, with its contents possibly altered.
2575 INSN, PROMOTED_MODE and UNCRITICAL are as for
2576 fixup_memory_subreg. */
2578 static rtx
2579 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2580 rtx x;
2581 rtx insn;
2582 enum machine_mode promoted_mode;
2583 int uncritical;
2585 enum rtx_code code;
2586 const char *fmt;
2587 int i;
2589 if (x == 0)
2590 return 0;
2592 code = GET_CODE (x);
2594 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2595 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2597 /* Nothing special about this RTX; fix its operands. */
2599 fmt = GET_RTX_FORMAT (code);
2600 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2602 if (fmt[i] == 'e')
2603 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2604 promoted_mode, uncritical);
2605 else if (fmt[i] == 'E')
2607 int j;
2608 for (j = 0; j < XVECLEN (x, i); j++)
2609 XVECEXP (x, i, j)
2610 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2611 promoted_mode, uncritical);
2614 return x;
2617 /* For each memory ref within X, if it refers to a stack slot
2618 with an out of range displacement, put the address in a temp register
2619 (emitting new insns before INSN to load these registers)
2620 and alter the memory ref to use that register.
2621 Replace each such MEM rtx with a copy, to avoid clobberage. */
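/* For instance, if the displacement in
(mem:SI (plus (reg fp) (const_int 40000))) is too large for the
target's addressing modes, the sum is computed into a temporary
register before INSN and the MEM is rewritten as a copy that uses
that register as its address.  */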
2623 static rtx
2624 fixup_stack_1 (x, insn)
2625 rtx x;
2626 rtx insn;
2628 int i;
2629 RTX_CODE code = GET_CODE (x);
2630 const char *fmt;
2632 if (code == MEM)
2634 rtx ad = XEXP (x, 0);
2635 /* If we have address of a stack slot but it's not valid
2636 (displacement is too large), compute the sum in a register. */
2637 if (GET_CODE (ad) == PLUS
2638 && GET_CODE (XEXP (ad, 0)) == REG
2639 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2640 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2641 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2642 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2643 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2644 #endif
2645 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2646 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2647 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2648 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2650 rtx temp, seq;
2651 if (memory_address_p (GET_MODE (x), ad))
2652 return x;
2654 start_sequence ();
2655 temp = copy_to_reg (ad);
2656 seq = get_insns ();
2657 end_sequence ();
2658 emit_insn_before (seq, insn);
2659 return replace_equiv_address (x, temp);
2661 return x;
2664 fmt = GET_RTX_FORMAT (code);
2665 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2667 if (fmt[i] == 'e')
2668 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2669 else if (fmt[i] == 'E')
2671 int j;
2672 for (j = 0; j < XVECLEN (x, i); j++)
2673 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2676 return x;
2679 /* Optimization: a bit-field instruction whose field
2680 happens to be a byte or halfword in memory
2681 can be changed to a move instruction.
2683 We call here when INSN is an insn to examine or store into a bit-field.
2684 BODY is the SET-rtx to be altered.
2686 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2687 (Currently this is called only from function.c, and EQUIV_MEM
2688 is always 0.) */
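/* For example, an extraction such as
(set (reg:SI 100) (zero_extract:SI (mem:SI ADDR) (const_int 8)
(const_int 0)))
reads an aligned byte, so it can usually be rewritten as a move from
a QImode MEM at the appropriate address, with a conversion to SImode
when the modes differ.  */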
2690 static void
2691 optimize_bit_field (body, insn, equiv_mem)
2692 rtx body;
2693 rtx insn;
2694 rtx *equiv_mem;
2696 rtx bitfield;
2697 int destflag;
2698 rtx seq = 0;
2699 enum machine_mode mode;
2701 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2702 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2703 bitfield = SET_DEST (body), destflag = 1;
2704 else
2705 bitfield = SET_SRC (body), destflag = 0;
2707 /* First check that the field being stored has constant size and position
2708 and is in fact a byte or halfword suitably aligned. */
2710 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2711 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2712 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2713 != BLKmode)
2714 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2716 rtx memref = 0;
2718 /* Now check that the containing word is memory, not a register,
2719 and that it is safe to change the machine mode. */
2721 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2722 memref = XEXP (bitfield, 0);
2723 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2724 && equiv_mem != 0)
2725 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2726 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2727 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2728 memref = SUBREG_REG (XEXP (bitfield, 0));
2729 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2730 && equiv_mem != 0
2731 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2732 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2734 if (memref
2735 && ! mode_dependent_address_p (XEXP (memref, 0))
2736 && ! MEM_VOLATILE_P (memref))
2738 /* Now adjust the address, first for any subreg'ing
2739 that we are now getting rid of,
2740 and then for which byte of the word is wanted. */
2742 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2743 rtx insns;
2745 /* Adjust OFFSET to count bits from low-address byte. */
2746 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2747 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2748 - offset - INTVAL (XEXP (bitfield, 1)));
2750 /* Adjust OFFSET to count bytes from low-address byte. */
2751 offset /= BITS_PER_UNIT;
2752 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2754 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2755 / UNITS_PER_WORD) * UNITS_PER_WORD;
2756 if (BYTES_BIG_ENDIAN)
2757 offset -= (MIN (UNITS_PER_WORD,
2758 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2759 - MIN (UNITS_PER_WORD,
2760 GET_MODE_SIZE (GET_MODE (memref))));
2763 start_sequence ();
2764 memref = adjust_address (memref, mode, offset);
2765 insns = get_insns ();
2766 end_sequence ();
2767 emit_insn_before (insns, insn);
2769 /* Store this memory reference where
2770 we found the bit field reference. */
2772 if (destflag)
2774 validate_change (insn, &SET_DEST (body), memref, 1);
2775 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2777 rtx src = SET_SRC (body);
2778 while (GET_CODE (src) == SUBREG
2779 && SUBREG_BYTE (src) == 0)
2780 src = SUBREG_REG (src);
2781 if (GET_MODE (src) != GET_MODE (memref))
2782 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2783 validate_change (insn, &SET_SRC (body), src, 1);
2785 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2786 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2787 /* This shouldn't happen because anything that didn't have
2788 one of these modes should have got converted explicitly
2789 and then referenced through a subreg.
2790 This is so because the original bit-field was
2791 handled by agg_mode and so its tree structure had
2792 the same mode that memref now has. */
2793 abort ();
2795 else
2797 rtx dest = SET_DEST (body);
2799 while (GET_CODE (dest) == SUBREG
2800 && SUBREG_BYTE (dest) == 0
2801 && (GET_MODE_CLASS (GET_MODE (dest))
2802 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2803 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2804 <= UNITS_PER_WORD))
2805 dest = SUBREG_REG (dest);
2807 validate_change (insn, &SET_DEST (body), dest, 1);
2809 if (GET_MODE (dest) == GET_MODE (memref))
2810 validate_change (insn, &SET_SRC (body), memref, 1);
2811 else
2813 /* Convert the mem ref to the destination mode. */
2814 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2816 start_sequence ();
2817 convert_move (newreg, memref,
2818 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2819 seq = get_insns ();
2820 end_sequence ();
2822 validate_change (insn, &SET_SRC (body), newreg, 1);
2826 /* See if we can convert this extraction or insertion into
2827 a simple move insn. We might not be able to do so if this
2828 was, for example, part of a PARALLEL.
2830 If we succeed, write out any needed conversions. If we fail,
2831 it is hard to guess why we failed, so don't do anything
2832 special; just let the optimization be suppressed. */
2834 if (apply_change_group () && seq)
2835 emit_insn_before (seq, insn);
2840 /* These routines are responsible for converting virtual register references
2841 to the actual hard register references once RTL generation is complete.
2843 The following five variables are used for communication between the
2844 routines. They contain the offsets of the virtual registers from their
2845 respective hard registers. */
2847 static int in_arg_offset;
2848 static int var_offset;
2849 static int dynamic_offset;
2850 static int out_arg_offset;
2851 static int cfa_offset;
2853 /* In most machines, the stack pointer register is equivalent to the bottom
2854 of the stack. */
2856 #ifndef STACK_POINTER_OFFSET
2857 #define STACK_POINTER_OFFSET 0
2858 #endif
2860 /* If not defined, pick an appropriate default for the offset of dynamically
2861 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2862 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2864 #ifndef STACK_DYNAMIC_OFFSET
2866 /* The bottom of the stack points to the actual arguments. If
2867 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2868 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2869 stack space for register parameters is not pushed by the caller, but
2870 rather is part of the fixed stack areas and hence not included in
2871 `current_function_outgoing_args_size'. Nevertheless, we must allow
2872 for it when allocating stack dynamic objects. */
2874 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2875 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2876 ((ACCUMULATE_OUTGOING_ARGS \
2877 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2878 + (STACK_POINTER_OFFSET))
2880 #else
2881 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2882 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2883 + (STACK_POINTER_OFFSET))
2884 #endif
2885 #endif
2887 /* On most machines, the CFA coincides with the first incoming parm. */
2889 #ifndef ARG_POINTER_CFA_OFFSET
2890 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2891 #endif
2893 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had its
2894 address taken. DECL is the decl or SAVE_EXPR for the object stored in the
2895 register, for later use if we do need to force REG into the stack. REG is
2896 overwritten with the MEM, as in put_reg_into_stack. */
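/* For example (illustrative, 32-bit target): if user variable I
lives in (reg:SI 58), taking its address destructively turns that
rtx into (mem:SI (addressof:SI (reg:SI 73) 58 <decl for I>)), where
(reg:SI 73) is the fresh pseudo recorded for later use.  */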
2898 rtx
2899 gen_mem_addressof (reg, decl)
2900 rtx reg;
2901 tree decl;
2903 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2904 REGNO (reg), decl);
2906 /* Calculate this before we start messing with decl's RTL. */
2907 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2909 /* If the original REG was a user-variable, then so is the REG whose
2910 address is being taken. Likewise for unchanging. */
2911 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2912 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2914 PUT_CODE (reg, MEM);
2915 MEM_ATTRS (reg) = 0;
2916 XEXP (reg, 0) = r;
2918 if (decl)
2920 tree type = TREE_TYPE (decl);
2921 enum machine_mode decl_mode
2922 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2923 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2924 : DECL_RTL_IF_SET (decl));
2926 PUT_MODE (reg, decl_mode);
2928 /* Clear DECL_RTL momentarily so functions below will work
2929 properly, then set it again. */
2930 if (DECL_P (decl) && decl_rtl == reg)
2931 SET_DECL_RTL (decl, 0);
2933 set_mem_attributes (reg, decl, 1);
2934 set_mem_alias_set (reg, set);
2936 if (DECL_P (decl) && decl_rtl == reg)
2937 SET_DECL_RTL (decl, reg);
2939 if (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0))
2940 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2942 else
2943 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2945 return reg;
2948 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2950 void
2951 flush_addressof (decl)
2952 tree decl;
2954 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2955 && DECL_RTL (decl) != 0
2956 && GET_CODE (DECL_RTL (decl)) == MEM
2957 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2958 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2959 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2962 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2964 static void
2965 put_addressof_into_stack (r, ht)
2966 rtx r;
2967 htab_t ht;
2969 tree decl, type;
2970 int volatile_p, used_p;
2972 rtx reg = XEXP (r, 0);
2974 if (GET_CODE (reg) != REG)
2975 abort ();
2977 decl = ADDRESSOF_DECL (r);
2978 if (decl)
2980 type = TREE_TYPE (decl);
2981 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2982 && TREE_THIS_VOLATILE (decl));
2983 used_p = (TREE_USED (decl)
2984 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2986 else
2988 type = NULL_TREE;
2989 volatile_p = 0;
2990 used_p = 1;
2993 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2994 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2997 /* List of replacements made below in purge_addressof_1 when creating
2998 bitfield insertions. */
2999 static rtx purge_bitfield_addressof_replacements;
3001 /* List of replacements made below in purge_addressof_1 for patterns
3002 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3003 corresponding (ADDRESSOF (REG ...)) and the value is a substitution
3004 for the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS
3005 is not enough in complex cases, e.g. when some field values can be
3006 extracted by using a MEM with a narrower mode. */
3007 static rtx purge_addressof_replacements;
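/* Both lists above are chains of EXPR_LIST nodes of the form
(expr_list KEY (expr_list VALUE ...rest...)), so the key of an entry
TEM is XEXP (TEM, 0), its value is XEXP (XEXP (TEM, 1), 0), and the
next entry is XEXP (XEXP (TEM, 1), 1), matching the traversals in
purge_addressof_1 below.  */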
3009 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3010 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3011 the stack. If the function returns FALSE then the replacement could not
3012 be made. */
3014 static bool
3015 purge_addressof_1 (loc, insn, force, store, ht)
3016 rtx *loc;
3017 rtx insn;
3018 int force, store;
3019 htab_t ht;
3021 rtx x;
3022 RTX_CODE code;
3023 int i, j;
3024 const char *fmt;
3025 bool result = true;
3027 /* Re-start here to avoid recursion in common cases. */
3028 restart:
3030 x = *loc;
3031 if (x == 0)
3032 return true;
3034 code = GET_CODE (x);
3036 /* If we don't return in any of the cases below, we will recurse inside
3037 the RTX, which will normally result in any ADDRESSOF being forced into
3038 memory. */
3039 if (code == SET)
3041 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3042 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3043 return result;
3045 else if (code == ADDRESSOF)
3047 rtx sub, insns;
3049 if (GET_CODE (XEXP (x, 0)) != MEM)
3051 put_addressof_into_stack (x, ht);
3052 return true;
3055 /* We must create a copy of the rtx because it was created by
3056 overwriting a REG rtx which is always shared. */
3057 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3058 if (validate_change (insn, loc, sub, 0)
3059 || validate_replace_rtx (x, sub, insn))
3060 return true;
3062 start_sequence ();
3063 sub = force_operand (sub, NULL_RTX);
3064 if (! validate_change (insn, loc, sub, 0)
3065 && ! validate_replace_rtx (x, sub, insn))
3066 abort ();
3068 insns = get_insns ();
3069 end_sequence ();
3070 emit_insn_before (insns, insn);
3071 return true;
3074 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3076 rtx sub = XEXP (XEXP (x, 0), 0);
3078 if (GET_CODE (sub) == MEM)
3079 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3080 else if (GET_CODE (sub) == REG
3081 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3082 ;
3083 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3085 int size_x, size_sub;
3087 if (!insn)
3089 /* When processing REG_NOTES look at the list of
3090 replacements done on the insn to find the register that X
3091 was replaced by. */
3092 rtx tem;
3094 for (tem = purge_bitfield_addressof_replacements;
3095 tem != NULL_RTX;
3096 tem = XEXP (XEXP (tem, 1), 1))
3097 if (rtx_equal_p (x, XEXP (tem, 0)))
3099 *loc = XEXP (XEXP (tem, 1), 0);
3100 return true;
3103 /* See comment for purge_addressof_replacements. */
3104 for (tem = purge_addressof_replacements;
3105 tem != NULL_RTX;
3106 tem = XEXP (XEXP (tem, 1), 1))
3107 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3109 rtx z = XEXP (XEXP (tem, 1), 0);
3111 if (GET_MODE (x) == GET_MODE (z)
3112 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3113 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3114 abort ();
3116 /* It can happen that the note may speak of things
3117 in a wider (or just different) mode than the
3118 code did. This is especially true of
3119 REG_RETVAL. */
3121 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3122 z = SUBREG_REG (z);
3124 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3125 && (GET_MODE_SIZE (GET_MODE (x))
3126 > GET_MODE_SIZE (GET_MODE (z))))
3128 /* This can occur as a result of invalid
3129 pointer casts, e.g. float f; ...
3130 *(long long int *)&f.
3131 ??? We could emit a warning here, but
3132 without a line number that wouldn't be
3133 very helpful. */
3134 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3136 else
3137 z = gen_lowpart (GET_MODE (x), z);
3139 *loc = z;
3140 return true;
3143 /* Sometimes we may not be able to find the replacement. For
3144 example when the original insn was a MEM in a wider mode,
3145 and the note is part of a sign extension of a narrowed
3146 version of that MEM. Gcc testcase compile/990829-1.c can
3147 generate an example of this situation. Rather than complain
3148 we return false, which will prompt our caller to remove the
3149 offending note. */
3150 return false;
3153 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3154 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3156 /* Don't even consider working with paradoxical subregs,
3157 or the moral equivalent seen here. */
3158 if (size_x <= size_sub
3159 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3161 /* Do a bitfield insertion to mirror what would happen
3162 in memory. */
3164 rtx val, seq;
3166 if (store)
3168 rtx p = PREV_INSN (insn);
3170 start_sequence ();
3171 val = gen_reg_rtx (GET_MODE (x));
3172 if (! validate_change (insn, loc, val, 0))
3174 /* Discard the current sequence and put the
3175 ADDRESSOF on stack. */
3176 end_sequence ();
3177 goto give_up;
3179 seq = get_insns ();
3180 end_sequence ();
3181 emit_insn_before (seq, insn);
3182 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3183 insn, ht);
3185 start_sequence ();
3186 store_bit_field (sub, size_x, 0, GET_MODE (x),
3187 val, GET_MODE_SIZE (GET_MODE (sub)));
3189 /* Make sure to unshare any shared rtl that store_bit_field
3190 might have created. */
3191 unshare_all_rtl_again (get_insns ());
3193 seq = get_insns ();
3194 end_sequence ();
3195 p = emit_insn_after (seq, insn);
3196 if (NEXT_INSN (insn))
3197 compute_insns_for_mem (NEXT_INSN (insn),
3198 p ? NEXT_INSN (p) : NULL_RTX,
3199 ht);
3201 else
3203 rtx p = PREV_INSN (insn);
3205 start_sequence ();
3206 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3207 GET_MODE (x), GET_MODE (x),
3208 GET_MODE_SIZE (GET_MODE (sub)));
3210 if (! validate_change (insn, loc, val, 0))
3212 /* Discard the current sequence and put the
3213 ADDRESSOF on stack. */
3214 end_sequence ();
3215 goto give_up;
3218 seq = get_insns ();
3219 end_sequence ();
3220 emit_insn_before (seq, insn);
3221 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3222 insn, ht);
3225 /* Remember the replacement so that the same one can be done
3226 on the REG_NOTES. */
3227 purge_bitfield_addressof_replacements
3228 = gen_rtx_EXPR_LIST (VOIDmode, x,
3229 gen_rtx_EXPR_LIST
3230 (VOIDmode, val,
3231 purge_bitfield_addressof_replacements));
3233 /* We replaced with a reg -- all done. */
3234 return true;
3238 else if (validate_change (insn, loc, sub, 0))
3240 /* Remember the replacement so that the same one can be done
3241 on the REG_NOTES. */
3242 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3244 rtx tem;
3246 for (tem = purge_addressof_replacements;
3247 tem != NULL_RTX;
3248 tem = XEXP (XEXP (tem, 1), 1))
3249 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3251 XEXP (XEXP (tem, 1), 0) = sub;
3252 return true;
3254 purge_addressof_replacements
3255 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3256 gen_rtx_EXPR_LIST (VOIDmode, sub,
3257 purge_addressof_replacements));
3258 return true;
3260 goto restart;
3264 give_up:
3265 /* Scan all subexpressions. */
3266 fmt = GET_RTX_FORMAT (code);
3267 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3269 if (*fmt == 'e')
3270 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3271 else if (*fmt == 'E')
3272 for (j = 0; j < XVECLEN (x, i); j++)
3273 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3276 return result;
3279 /* Return a hash value for K, a struct insns_for_mem_entry keyed by a REG. */
3281 static hashval_t
3282 insns_for_mem_hash (k)
3283 const void * k;
3285 /* Use the address of the key for the hash value. */
3286 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3287 return (hashval_t) m->key;
3290 /* Return nonzero if entries K1 and K2 have the same REG key. */
3292 static int
3293 insns_for_mem_comp (k1, k2)
3294 const void * k1;
3295 const void * k2;
3297 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3298 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3299 return m1->key == m2->key;
3302 struct insns_for_mem_walk_info
3304 /* The hash table that we are using to record which INSNs use which
3305 MEMs. */
3306 htab_t ht;
3308 /* The INSN we are currently processing. */
3309 rtx insn;
3311 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3312 to find the insns that use the REGs in the ADDRESSOFs. */
3313 int pass;
3316 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3317 that might be used in an ADDRESSOF expression, record this INSN in
3318 the hash table given by DATA (which is really a pointer to an
3319 insns_for_mem_walk_info structure). */
3321 static int
3322 insns_for_mem_walk (r, data)
3323 rtx *r;
3324 void *data;
3326 struct insns_for_mem_walk_info *ifmwi
3327 = (struct insns_for_mem_walk_info *) data;
3328 struct insns_for_mem_entry tmp;
3329 tmp.insns = NULL_RTX;
3331 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3332 && GET_CODE (XEXP (*r, 0)) == REG)
3334 PTR *e;
3335 tmp.key = XEXP (*r, 0);
3336 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3337 if (*e == NULL)
3339 *e = ggc_alloc (sizeof (tmp));
3340 memcpy (*e, &tmp, sizeof (tmp));
3343 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3345 struct insns_for_mem_entry *ifme;
3346 tmp.key = *r;
3347 ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3349 /* If we have not already recorded this INSN, do so now. Since
3350 we process the INSNs in order, we know that if we have
3351 recorded it, it must be at the front of the list. */
3352 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3353 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3354 ifme->insns);
3357 return 0;
3360 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3361 which REGs in HT. */
3363 static void
3364 compute_insns_for_mem (insns, last_insn, ht)
3365 rtx insns;
3366 rtx last_insn;
3367 htab_t ht;
3369 rtx insn;
3370 struct insns_for_mem_walk_info ifmwi;
3371 ifmwi.ht = ht;
3373 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3374 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3375 if (INSN_P (insn))
3377 ifmwi.insn = insn;
3378 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3382 /* Helper function for purge_addressof called through for_each_rtx.
3383 Returns true iff the rtl is an ADDRESSOF. */
3385 static int
3386 is_addressof (rtl, data)
3387 rtx *rtl;
3388 void *data ATTRIBUTE_UNUSED;
3390 return GET_CODE (*rtl) == ADDRESSOF;
3393 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3394 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3395 stack. */
3397 void
3398 purge_addressof (insns)
3399 rtx insns;
3401 rtx insn;
3402 htab_t ht;
3404 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3405 requires a fixup pass over the instruction stream to correct
3406 INSNs that depended on the REG being a REG, and not a MEM. But,
3407 these fixup passes are slow. Furthermore, most MEMs are not
3408 mentioned in very many instructions. So, we speed up the process
3409 by pre-calculating which REGs occur in which INSNs; that allows
3410 us to perform the fixup passes much more quickly. */
3411 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3412 compute_insns_for_mem (insns, NULL_RTX, ht);
3414 for (insn = insns; insn; insn = NEXT_INSN (insn))
3415 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3416 || GET_CODE (insn) == CALL_INSN)
3418 if (! purge_addressof_1 (&PATTERN (insn), insn,
3419 asm_noperands (PATTERN (insn)) > 0, 0, ht))
3420 /* If we could not replace the ADDRESSOFs in the insn,
3421 something is wrong. */
3422 abort ();
3424 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, ht))
3426 /* If we could not replace the ADDRESSOFs in the insn's notes,
3427 we can just remove the offending notes instead. */
3428 rtx note;
3430 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3432 /* If we find a REG_RETVAL note then the insn is a libcall.
3433 Such insns must have REG_EQUAL notes as well, in order
3434 for later passes of the compiler to work. So it is not
3435 safe to delete the notes here, and instead we abort. */
3436 if (REG_NOTE_KIND (note) == REG_RETVAL)
3437 abort ();
3438 if (for_each_rtx (&note, is_addressof, NULL))
3439 remove_note (insn, note);
3444 /* Clean up. */
3445 purge_bitfield_addressof_replacements = 0;
3446 purge_addressof_replacements = 0;
3448 /* REGs are shared. purge_addressof will destructively replace a REG
3449 with a MEM, which creates shared MEMs.
3451 Unfortunately, the children of put_reg_into_stack assume that MEMs
3452 referring to the same stack slot are shared (fixup_var_refs and
3453 the associated hash table code).
3455 So, we have to do another unsharing pass after we have flushed any
3456 REGs that had their address taken into the stack.
3458 It may be worth tracking whether or not we converted any REGs into
3459 MEMs to avoid this overhead when it is not needed. */
3460 unshare_all_rtl_again (get_insns ());
3463 /* Convert a SET of a hard subreg to a set of the appropriate hard
3464 register. A subroutine of purge_hard_subreg_sets. */
3466 static void
3467 purge_single_hard_subreg_set (pattern)
3468 rtx pattern;
3470 rtx reg = SET_DEST (pattern);
3471 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3472 int offset = 0;
3474 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3475 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3477 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3478 GET_MODE (SUBREG_REG (reg)),
3479 SUBREG_BYTE (reg),
3480 GET_MODE (reg));
3481 reg = SUBREG_REG (reg);
3485 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3487 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3488 SET_DEST (pattern) = reg;
3492 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3493 only such SETs that we expect to see are those left in because
3494 integrate can't handle sets of parts of a return value register.
3496 We don't use alter_subreg because we only want to eliminate subregs
3497 of hard registers. */
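/* For example (illustrative, assuming 4-byte hard registers with
little-endian word ordering): a leftover
(set (subreg:SI (reg:DI 0) 4) ...) becomes (set (reg:SI 1) ...),
with subreg_regno_offset supplying the register number offset.  */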
3499 void
3500 purge_hard_subreg_sets (insn)
3501 rtx insn;
3503 for (; insn; insn = NEXT_INSN (insn))
3505 if (INSN_P (insn))
3507 rtx pattern = PATTERN (insn);
3508 switch (GET_CODE (pattern))
3510 case SET:
3511 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3512 purge_single_hard_subreg_set (pattern);
3513 break;
3514 case PARALLEL:
3516 int j;
3517 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3519 rtx inner_pattern = XVECEXP (pattern, 0, j);
3520 if (GET_CODE (inner_pattern) == SET
3521 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3522 purge_single_hard_subreg_set (inner_pattern);
3525 break;
3526 default:
3527 break;
3533 /* Pass through the INSNS of function FNDECL and convert virtual register
3534 references to hard register references. */
3536 void
3537 instantiate_virtual_regs (fndecl, insns)
3538 tree fndecl;
3539 rtx insns;
3541 rtx insn;
3542 unsigned int i;
3544 /* Compute the offsets to use for this function. */
3545 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3546 var_offset = STARTING_FRAME_OFFSET;
3547 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3548 out_arg_offset = STACK_POINTER_OFFSET;
3549 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3551 /* Scan all variables and parameters of this function. For each that is
3552 in memory, instantiate all virtual registers if the result is a valid
3553 address. If not, we do it later. That will handle most uses of virtual
3554 regs on many machines. */
3555 instantiate_decls (fndecl, 1);
3557 /* Initialize recognition, indicating that volatile is OK. */
3558 init_recog ();
3560 /* Scan through all the insns, instantiating every virtual register still
3561 present. */
3562 for (insn = insns; insn; insn = NEXT_INSN (insn))
3563 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3564 || GET_CODE (insn) == CALL_INSN)
3566 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3567 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3568 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3569 if (GET_CODE (insn) == CALL_INSN)
3570 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3571 NULL_RTX, 0);
3574 /* Instantiate the stack slots for the parm registers, for later use in
3575 addressof elimination. */
3576 for (i = 0; i < max_parm_reg; ++i)
3577 if (parm_reg_stack_loc[i])
3578 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3580 /* Now instantiate the remaining register equivalences for debugging info.
3581 These will not be valid addresses. */
3582 instantiate_decls (fndecl, 0);
3584 /* Indicate that, from now on, assign_stack_local should use
3585 frame_pointer_rtx. */
3586 virtuals_instantiated = 1;
3589 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3590 all virtual registers in their DECL_RTL's.
3592 If VALID_ONLY, do this only if the resulting address is still valid.
3593 Otherwise, always do it. */
3595 static void
3596 instantiate_decls (fndecl, valid_only)
3597 tree fndecl;
3598 int valid_only;
3600 tree decl;
3602 /* Process all parameters of the function. */
3603 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3605 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3606 HOST_WIDE_INT size_rtl;
3608 instantiate_decl (DECL_RTL (decl), size, valid_only);
3610 /* If the parameter was promoted, then the incoming RTL mode may be
3611 larger than the declared type size. We must use the larger of
3612 the two sizes. */
3613 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3614 size = MAX (size_rtl, size);
3615 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3618 /* Now process all variables defined in the function or its subblocks. */
3619 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3622 /* Subroutine of instantiate_decls: Process all decls in the given
3623 BLOCK node and all its subblocks. */
3625 static void
3626 instantiate_decls_1 (let, valid_only)
3627 tree let;
3628 int valid_only;
3630 tree t;
3632 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3633 if (DECL_RTL_SET_P (t))
3634 instantiate_decl (DECL_RTL (t),
3635 int_size_in_bytes (TREE_TYPE (t)),
3636 valid_only);
3638 /* Process all subblocks. */
3639 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3640 instantiate_decls_1 (t, valid_only);
3643 /* Subroutine of the preceding procedures: Given RTL representing a
3644 decl and the size of the object, do any instantiation required.
3646 If VALID_ONLY is non-zero, it means that the RTL should only be
3647 changed if the new address is valid. */
3649 static void
3650 instantiate_decl (x, size, valid_only)
3651 rtx x;
3652 HOST_WIDE_INT size;
3653 int valid_only;
3655 enum machine_mode mode;
3656 rtx addr;
3658 /* If this is not a MEM, no need to do anything. Similarly if the
3659 address is a constant or a register that is not a virtual register. */
3661 if (x == 0 || GET_CODE (x) != MEM)
3662 return;
3664 addr = XEXP (x, 0);
3665 if (CONSTANT_P (addr)
3666 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3667 || (GET_CODE (addr) == REG
3668 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3669 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3670 return;
3672 /* If we should only do this if the address is valid, copy the address.
3673 We need to do this so we can undo any changes that might make the
3674 address invalid. This copy is unfortunate, but probably can't be
3675 avoided. */
3677 if (valid_only)
3678 addr = copy_rtx (addr);
3680 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3682 if (valid_only && size >= 0)
3684 unsigned HOST_WIDE_INT decl_size = size;
3686 /* Now verify that the resulting address is valid for every integer or
3687 floating-point mode up to and including SIZE bytes long. We do this
3688 since the object might be accessed in any mode and frame addresses
3689 are shared. */
3691 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3692 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3693 mode = GET_MODE_WIDER_MODE (mode))
3694 if (! memory_address_p (mode, addr))
3695 return;
3697 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3698 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3699 mode = GET_MODE_WIDER_MODE (mode))
3700 if (! memory_address_p (mode, addr))
3701 return;
3704 /* Put back the address now that we have updated it and we either know
3705 it is valid or we don't care whether it is valid. */
3707 XEXP (x, 0) = addr;
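/* Illustrative note, not part of the original source: suppose VALID_ONLY
   is set for an 8-byte decl on a machine whose integer loads accept
   reg+offset addresses but whose DFmode loads do not.  The MODE_FLOAT
   loop above then reaches DFmode, memory_address_p fails, and we return
   without storing the new address, leaving the virtual address to be
   instantiated insn by insn.  */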
3710 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3711 is a virtual register, return the equivalent hard register and set the
3712 offset indirectly through the pointer. Otherwise, return 0. */
3714 static rtx
3715 instantiate_new_reg (x, poffset)
3716 rtx x;
3717 HOST_WIDE_INT *poffset;
3719 rtx new;
3720 HOST_WIDE_INT offset;
3722 if (x == virtual_incoming_args_rtx)
3723 new = arg_pointer_rtx, offset = in_arg_offset;
3724 else if (x == virtual_stack_vars_rtx)
3725 new = frame_pointer_rtx, offset = var_offset;
3726 else if (x == virtual_stack_dynamic_rtx)
3727 new = stack_pointer_rtx, offset = dynamic_offset;
3728 else if (x == virtual_outgoing_args_rtx)
3729 new = stack_pointer_rtx, offset = out_arg_offset;
3730 else if (x == virtual_cfa_rtx)
3731 new = arg_pointer_rtx, offset = cfa_offset;
3732 else
3733 return 0;
3735 *poffset = offset;
3736 return new;
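/* Illustrative example, not from the original source: given the table
   above, a use of a virtual register maps to its hard counterpart plus
   the recorded offset.  Assuming var_offset were -16:

	(mem (plus (reg virtual-stack-vars) (const_int 8)))
     => (mem (plus (reg frame-pointer) (const_int -8)))

   The caller folds *POFFSET into any constant term.  */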
3739 /* Given a pointer to a piece of rtx and an optional pointer to the
3740 containing object, instantiate any virtual registers present in it.
3742 If EXTRA_INSNS, we always do the replacement and generate
3743 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3744 is not valid.
3746 Return 1 if we either had nothing to do or if we were able to do the
3747 needed replacement. Return 0 otherwise; we only return zero if
3748 EXTRA_INSNS is zero.
3750 We first try some simple transformations to avoid the creation of extra
3751 pseudos. */
3753 static int
3754 instantiate_virtual_regs_1 (loc, object, extra_insns)
3755 rtx *loc;
3756 rtx object;
3757 int extra_insns;
3759 rtx x;
3760 RTX_CODE code;
3761 rtx new = 0;
3762 HOST_WIDE_INT offset = 0;
3763 rtx temp;
3764 rtx seq;
3765 int i, j;
3766 const char *fmt;
3768 /* Re-start here to avoid recursion in common cases. */
3769 restart:
3771 x = *loc;
3772 if (x == 0)
3773 return 1;
3775 code = GET_CODE (x);
3777 /* Check for some special cases. */
3778 switch (code)
3780 case CONST_INT:
3781 case CONST_DOUBLE:
3782 case CONST_VECTOR:
3783 case CONST:
3784 case SYMBOL_REF:
3785 case CODE_LABEL:
3786 case PC:
3787 case CC0:
3788 case ASM_INPUT:
3789 case ADDR_VEC:
3790 case ADDR_DIFF_VEC:
3791 case RETURN:
3792 return 1;
3794 case SET:
3795 /* We are allowed to set the virtual registers. This means that
3796 the actual register should receive the source minus the
3797 appropriate offset. This is used, for example, in the handling
3798 of non-local gotos. */
3799 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3801 rtx src = SET_SRC (x);
3803 /* We are setting the register, not using it, so the relevant
3804 offset is the negative of the offset to use were we using
3805 the register. */
3806 offset = - offset;
3807 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3809 /* The only valid sources here are PLUS or REG. Just do
3810 the simplest possible thing to handle them. */
3811 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3812 abort ();
3814 start_sequence ();
3815 if (GET_CODE (src) != REG)
3816 temp = force_operand (src, NULL_RTX);
3817 else
3818 temp = src;
3819 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3820 seq = get_insns ();
3821 end_sequence ();
3823 emit_insn_before (seq, object);
3824 SET_DEST (x) = new;
3826 if (! validate_change (object, &SET_SRC (x), temp, 0)
3827 || ! extra_insns)
3828 abort ();
3830 return 1;
3833 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3834 loc = &SET_SRC (x);
3835 goto restart;
3837 case PLUS:
3838 /* Handle special case of virtual register plus constant. */
3839 if (CONSTANT_P (XEXP (x, 1)))
3841 rtx old, new_offset;
3843 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3844 if (GET_CODE (XEXP (x, 0)) == PLUS)
3846 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3848 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3849 extra_insns);
3850 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3852 else
3854 loc = &XEXP (x, 0);
3855 goto restart;
3859 #ifdef POINTERS_EXTEND_UNSIGNED
3860 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3861 we can commute the PLUS and SUBREG because pointers into the
3862 frame are well-behaved. */
3863 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3864 && GET_CODE (XEXP (x, 1)) == CONST_INT
3865 && 0 != (new
3866 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3867 &offset))
3868 && validate_change (object, loc,
3869 plus_constant (gen_lowpart (ptr_mode,
3870 new),
3871 offset
3872 + INTVAL (XEXP (x, 1))),
3873 0))
3874 return 1;
3875 #endif
3876 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3878 /* We know the second operand is a constant. Unless the
3879 first operand is a REG (which has already been checked),
3880 it needs to be checked. */
3881 if (GET_CODE (XEXP (x, 0)) != REG)
3883 loc = &XEXP (x, 0);
3884 goto restart;
3886 return 1;
3889 new_offset = plus_constant (XEXP (x, 1), offset);
3891 /* If the new constant is zero, try to replace the sum with just
3892 the register. */
3893 if (new_offset == const0_rtx
3894 && validate_change (object, loc, new, 0))
3895 return 1;
3897 /* Next try to replace the register and new offset.
3898 There are two changes to validate here and we can't assume that
3899 when the old offset equals the new one, just changing the register
3900 will yield a valid insn. In the interests of a little efficiency,
3901 however, we only call validate_change once (we don't queue up the
3902 changes and then call apply_change_group). */
3904 old = XEXP (x, 0);
3905 if (offset == 0
3906 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3907 : (XEXP (x, 0) = new,
3908 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3910 if (! extra_insns)
3912 XEXP (x, 0) = old;
3913 return 0;
3916 /* Otherwise copy the new constant into a register and replace
3917 constant with that register. */
3918 temp = gen_reg_rtx (Pmode);
3919 XEXP (x, 0) = new;
3920 if (validate_change (object, &XEXP (x, 1), temp, 0))
3921 emit_insn_before (gen_move_insn (temp, new_offset), object);
3922 else
3924 /* If that didn't work, replace this expression with a
3925 register containing the sum. */
3927 XEXP (x, 0) = old;
3928 new = gen_rtx_PLUS (Pmode, new, new_offset);
3930 start_sequence ();
3931 temp = force_operand (new, NULL_RTX);
3932 seq = get_insns ();
3933 end_sequence ();
3935 emit_insn_before (seq, object);
3936 if (! validate_change (object, loc, temp, 0)
3937 && ! validate_replace_rtx (x, temp, object))
3938 abort ();
3942 return 1;
3945 /* Fall through to generic two-operand expression case. */
3946 case EXPR_LIST:
3947 case CALL:
3948 case COMPARE:
3949 case MINUS:
3950 case MULT:
3951 case DIV: case UDIV:
3952 case MOD: case UMOD:
3953 case AND: case IOR: case XOR:
3954 case ROTATERT: case ROTATE:
3955 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3956 case NE: case EQ:
3957 case GE: case GT: case GEU: case GTU:
3958 case LE: case LT: case LEU: case LTU:
3959 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3960 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3961 loc = &XEXP (x, 0);
3962 goto restart;
3964 case MEM:
3965 /* Most cases of MEM that convert to valid addresses have already been
3966 handled by our scan of decls. The only special handling we
3967 need here is to make a copy of the rtx to ensure it isn't being
3968 shared if we have to change it to a pseudo.
3970 If the rtx is a simple reference to an address via a virtual register,
3971 it can potentially be shared. In such cases, first try to make it
3972 a valid address, which can also be shared. Otherwise, copy it and
3973 proceed normally.
3975 First check for common cases that need no processing. These are
3976 usually due to instantiation already being done on a previous instance
3977 of a shared rtx. */
3979 temp = XEXP (x, 0);
3980 if (CONSTANT_ADDRESS_P (temp)
3981 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3982 || temp == arg_pointer_rtx
3983 #endif
3984 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3985 || temp == hard_frame_pointer_rtx
3986 #endif
3987 || temp == frame_pointer_rtx)
3988 return 1;
3990 if (GET_CODE (temp) == PLUS
3991 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3992 && (XEXP (temp, 0) == frame_pointer_rtx
3993 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3994 || XEXP (temp, 0) == hard_frame_pointer_rtx
3995 #endif
3996 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3997 || XEXP (temp, 0) == arg_pointer_rtx
3998 #endif
3999 ))
4000 return 1;
4002 if (temp == virtual_stack_vars_rtx
4003 || temp == virtual_incoming_args_rtx
4004 || (GET_CODE (temp) == PLUS
4005 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4006 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4007 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4009 /* This MEM may be shared. If the substitution can be done without
4010 the need to generate new pseudos, we want to do it in place
4011 so all copies of the shared rtx benefit. The call below will
4012 only make substitutions if the resulting address is still
4013 valid.
4015 Note that we cannot pass X as the object in the recursive call
4016 since the insn being processed may not allow all valid
4017 addresses. However, if we were not passed an object, we can
4018 only modify X without copying it if X will have a valid
4019 address.
4021 ??? Also note that this can still lose if OBJECT is an insn that
4022 has fewer restrictions on an address than some other insn.
4023 In that case, we will modify the shared address. This case
4024 doesn't seem very likely, though. One case where this could
4025 happen is in the case of a USE or CLOBBER reference, but we
4026 take care of that below. */
4028 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4029 object ? object : x, 0))
4030 return 1;
4032 /* Otherwise make a copy and process that copy. We copy the entire
4033 RTL expression since it might be a PLUS which could also be
4034 shared. */
4035 *loc = x = copy_rtx (x);
4038 /* Fall through to generic unary operation case. */
4039 case PREFETCH:
4040 case SUBREG:
4041 case STRICT_LOW_PART:
4042 case NEG: case NOT:
4043 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4044 case SIGN_EXTEND: case ZERO_EXTEND:
4045 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4046 case FLOAT: case FIX:
4047 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4048 case ABS:
4049 case SQRT:
4050 case FFS:
4051 /* These cases either have just one operand or we know that we need not
4052 check the rest of the operands. */
4053 loc = &XEXP (x, 0);
4054 goto restart;
4056 case USE:
4057 case CLOBBER:
4058 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4059 go ahead and make the invalid change, but apply it to a copy. For a REG,
4060 just make the recursive call, since there's no chance of a problem. */
4062 if ((GET_CODE (XEXP (x, 0)) == MEM
4063 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4064 0))
4065 || (GET_CODE (XEXP (x, 0)) == REG
4066 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4067 return 1;
4069 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4070 loc = &XEXP (x, 0);
4071 goto restart;
4073 case REG:
4074 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4075 in front of this insn and substitute the temporary. */
4076 if ((new = instantiate_new_reg (x, &offset)) != 0)
4078 temp = plus_constant (new, offset);
4079 if (!validate_change (object, loc, temp, 0))
4081 if (! extra_insns)
4082 return 0;
4084 start_sequence ();
4085 temp = force_operand (temp, NULL_RTX);
4086 seq = get_insns ();
4087 end_sequence ();
4089 emit_insn_before (seq, object);
4090 if (! validate_change (object, loc, temp, 0)
4091 && ! validate_replace_rtx (x, temp, object))
4092 abort ();
4096 return 1;
4098 case ADDRESSOF:
4099 if (GET_CODE (XEXP (x, 0)) == REG)
4100 return 1;
4102 else if (GET_CODE (XEXP (x, 0)) == MEM)
4104 /* If we have a (addressof (mem ..)), do any instantiation inside
4105 since we know we'll be making the inside valid when we finally
4106 remove the ADDRESSOF. */
4107 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4108 return 1;
4110 break;
4112 default:
4113 break;
4116 /* Scan all subexpressions. */
4117 fmt = GET_RTX_FORMAT (code);
4118 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4119 if (*fmt == 'e')
4121 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4122 return 0;
4124 else if (*fmt == 'E')
4125 for (j = 0; j < XVECLEN (x, i); j++)
4126 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4127 extra_insns))
4128 return 0;
4130 return 1;
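/* Illustrative example, not from the original source, of the SET
   special case above.  Assuming in_arg_offset were 8, a store into a
   virtual register subtracts the offset:

	(set (reg virtual-incoming-args) (reg 117))
     => (set (reg argp) (plus (reg 117) (const_int -8)))

   so that a later use, which adds the offset back, reproduces the
   stored value.  */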
4133 /* Optimization: assuming this function does not receive nonlocal gotos,
4134 delete the handlers for such, as well as the insns to establish
4135 and disestablish them. */
4137 static void
4138 delete_handlers ()
4140 rtx insn;
4141 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4143 /* Delete the handler by turning off the flag that would
4144 prevent jump_optimize from deleting it.
4145 Also permit deletion of the nonlocal labels themselves
4146 if nothing local refers to them. */
4147 if (GET_CODE (insn) == CODE_LABEL)
4149 tree t, last_t;
4151 LABEL_PRESERVE_P (insn) = 0;
4153 /* Remove it from the nonlocal_label list, to avoid confusing
4154 flow. */
4155 for (t = nonlocal_labels, last_t = 0; t;
4156 last_t = t, t = TREE_CHAIN (t))
4157 if (DECL_RTL (TREE_VALUE (t)) == insn)
4158 break;
4159 if (t)
4161 if (! last_t)
4162 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4163 else
4164 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4167 if (GET_CODE (insn) == INSN)
4169 int can_delete = 0;
4170 rtx t;
4171 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4172 if (reg_mentioned_p (t, PATTERN (insn)))
4174 can_delete = 1;
4175 break;
4177 if (can_delete
4178 || (nonlocal_goto_stack_level != 0
4179 && reg_mentioned_p (nonlocal_goto_stack_level,
4180 PATTERN (insn))))
4181 delete_related_insns (insn);
4186 int
4187 max_parm_reg_num ()
4189 return max_parm_reg;
4192 /* Return the first insn following those generated by `assign_parms'. */
4194 rtx
4195 get_first_nonparm_insn ()
4197 if (last_parm_insn)
4198 return NEXT_INSN (last_parm_insn);
4199 return get_insns ();
4202 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4203 Crash if there is none. */
4205 rtx
4206 get_first_block_beg ()
4208 rtx searcher;
4209 rtx insn = get_first_nonparm_insn ();
4211 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4212 if (GET_CODE (searcher) == NOTE
4213 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4214 return searcher;
4216 abort (); /* Invalid call to this function. (See comments above.) */
4217 return NULL_RTX;
4220 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4221 This means a type for which function calls must pass an address to the
4222 function or get an address back from the function.
4223 EXP may be a type node or an expression (whose type is tested). */
4225 int
4226 aggregate_value_p (exp)
4227 tree exp;
4229 int i, regno, nregs;
4230 rtx reg;
4232 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4234 if (TREE_CODE (type) == VOID_TYPE)
4235 return 0;
4236 if (RETURN_IN_MEMORY (type))
4237 return 1;
4238 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4239 and thus can't be returned in registers. */
4240 if (TREE_ADDRESSABLE (type))
4241 return 1;
4242 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4243 return 1;
4244 /* Make sure we have suitable call-clobbered regs to return
4245 the value in; if not, we must return it in memory. */
4246 reg = hard_function_value (type, 0, 0);
4248 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4249 it is OK. */
4250 if (GET_CODE (reg) != REG)
4251 return 0;
4253 regno = REGNO (reg);
4254 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4255 for (i = 0; i < nregs; i++)
4256 if (! call_used_regs[regno + i])
4257 return 1;
4258 return 0;
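/* Illustrative example, not from the original source: on a typical
   32-bit target where RETURN_IN_MEMORY holds for large structures,

	struct big { int a[8]; } f (void);

   gets aggregate_value_p == 1 (the caller passes the address of a
   temporary for f to fill in), while `int g (void)' returns in a
   call-clobbered hard register and gets 0.  */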
4261 /* Assign RTL expressions to the function's parameters.
4262 This may involve copying them into registers and using
4263 those registers as the RTL for them. */
4265 void
4266 assign_parms (fndecl)
4267 tree fndecl;
4269 tree parm;
4270 rtx entry_parm = 0;
4271 rtx stack_parm = 0;
4272 CUMULATIVE_ARGS args_so_far;
4273 enum machine_mode promoted_mode, passed_mode;
4274 enum machine_mode nominal_mode, promoted_nominal_mode;
4275 int unsignedp;
4276 /* Total space needed so far for args on the stack,
4277 given as a constant and a tree-expression. */
4278 struct args_size stack_args_size;
4279 tree fntype = TREE_TYPE (fndecl);
4280 tree fnargs = DECL_ARGUMENTS (fndecl);
4281 /* This is used for the arg pointer when referring to stack args. */
4282 rtx internal_arg_pointer;
4283 /* This is a dummy PARM_DECL that we used for the function result if
4284 the function returns a structure. */
4285 tree function_result_decl = 0;
4286 #ifdef SETUP_INCOMING_VARARGS
4287 int varargs_setup = 0;
4288 #endif
4289 rtx conversion_insns = 0;
4290 struct args_size alignment_pad;
4292 /* Nonzero if function takes extra anonymous args.
4293 This means the last named arg must be on the stack
4294 right before the anonymous ones. */
4295 int stdarg
4296 = (TYPE_ARG_TYPES (fntype) != 0
4297 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4298 != void_type_node));
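/* For example, `int f (int a, ...)' has a TYPE_ARG_TYPES list that does
   not end in void_type_node, so STDARG is 1; the prototype
   `int g (int a)' ends in void_type_node and gets 0, as does an
   unprototyped function, whose TYPE_ARG_TYPES is 0.  */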
4300 current_function_stdarg = stdarg;
4302 /* If the reg that the virtual arg pointer will be translated into is
4303 not a fixed reg or is the stack pointer, make a copy of the virtual
4304 arg pointer, and address parms via the copy. The frame pointer is
4305 considered fixed even though it is not marked as such.
4307 The second time through, simply use ap to avoid generating rtx. */
4309 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4310 || ! (fixed_regs[ARG_POINTER_REGNUM]
4311 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4312 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4313 else
4314 internal_arg_pointer = virtual_incoming_args_rtx;
4315 current_function_internal_arg_pointer = internal_arg_pointer;
4317 stack_args_size.constant = 0;
4318 stack_args_size.var = 0;
4320 /* If struct value address is treated as the first argument, make it so. */
4321 if (aggregate_value_p (DECL_RESULT (fndecl))
4322 && ! current_function_returns_pcc_struct
4323 && struct_value_incoming_rtx == 0)
4325 tree type = build_pointer_type (TREE_TYPE (fntype));
4327 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4329 DECL_ARG_TYPE (function_result_decl) = type;
4330 TREE_CHAIN (function_result_decl) = fnargs;
4331 fnargs = function_result_decl;
4334 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4335 parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4337 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4338 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4339 #else
4340 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4341 #endif
4343 /* We haven't yet found an argument that we must push and pretend the
4344 caller did. */
4345 current_function_pretend_args_size = 0;
4347 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4349 struct args_size stack_offset;
4350 struct args_size arg_size;
4351 int passed_pointer = 0;
4352 int did_conversion = 0;
4353 tree passed_type = DECL_ARG_TYPE (parm);
4354 tree nominal_type = TREE_TYPE (parm);
4355 int pretend_named;
4356 int last_named = 0, named_arg;
4358 /* Set LAST_NAMED if this is last named arg before last
4359 anonymous args. */
4360 if (stdarg)
4362 tree tem;
4364 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4365 if (DECL_NAME (tem))
4366 break;
4368 if (tem == 0)
4369 last_named = 1;
4371 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4372 most machines, if this is a varargs/stdarg function, then we treat
4373 the last named arg as if it were anonymous too. */
4374 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4376 if (TREE_TYPE (parm) == error_mark_node
4377 /* This can happen after weird syntax errors
4378 or if an enum type is defined among the parms. */
4379 || TREE_CODE (parm) != PARM_DECL
4380 || passed_type == NULL)
4382 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4383 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4384 TREE_USED (parm) = 1;
4385 continue;
4388 /* Find mode of arg as it is passed, and mode of arg
4389 as it should be during execution of this function. */
4390 passed_mode = TYPE_MODE (passed_type);
4391 nominal_mode = TYPE_MODE (nominal_type);
4393 /* If the parm's mode is VOID, its value doesn't matter, so avoid
4394 the usual things like emit_move_insn that could crash. */
4395 if (nominal_mode == VOIDmode)
4397 SET_DECL_RTL (parm, const0_rtx);
4398 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4399 continue;
4402 /* If the parm is to be passed as a transparent union, use the
4403 type of the first field for the tests below. We have already
4404 verified that the modes are the same. */
4405 if (DECL_TRANSPARENT_UNION (parm)
4406 || (TREE_CODE (passed_type) == UNION_TYPE
4407 && TYPE_TRANSPARENT_UNION (passed_type)))
4408 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4410 /* See if this arg was passed by invisible reference. It is if
4411 it is an object whose size depends on the contents of the
4412 object itself or if the machine requires these objects be passed
4413 that way. */
4415 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4416 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4417 || TREE_ADDRESSABLE (passed_type)
4418 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4419 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4420 passed_type, named_arg)
4421 #endif
4422 )
4424 passed_type = nominal_type = build_pointer_type (passed_type);
4425 passed_pointer = 1;
4426 passed_mode = nominal_mode = Pmode;
4428 /* See if the frontend wants to pass this by invisible reference. */
4429 else if (passed_type != nominal_type
4430 && POINTER_TYPE_P (passed_type)
4431 && TREE_TYPE (passed_type) == nominal_type)
4433 nominal_type = passed_type;
4434 passed_pointer = 1;
4435 passed_mode = nominal_mode = Pmode;
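/* For instance, a hypothetical target whose FUNCTION_ARG_PASS_BY_REFERENCE
   accepts any aggregate larger than two words would receive a
   `struct S { char c[64]; }' parm as a `struct S *' in Pmode, with
   PASSED_POINTER set to record that fact.  */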
4438 promoted_mode = passed_mode;
4440 #ifdef PROMOTE_FUNCTION_ARGS
4441 /* Compute the mode in which the arg is actually extended to. */
4442 unsignedp = TREE_UNSIGNED (passed_type);
4443 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4444 #endif
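/* E.g. with PROMOTE_FUNCTION_ARGS defined, a prototyped QImode `char'
   argument is typically widened here, so PROMOTED_MODE becomes SImode
   while NOMINAL_MODE remains QImode.  */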
4446 /* Let machine desc say which reg (if any) the parm arrives in.
4447 0 means it arrives on the stack. */
4448 #ifdef FUNCTION_INCOMING_ARG
4449 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4450 passed_type, named_arg);
4451 #else
4452 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4453 passed_type, named_arg);
4454 #endif
4456 if (entry_parm == 0)
4457 promoted_mode = passed_mode;
4459 #ifdef SETUP_INCOMING_VARARGS
4460 /* If this is the last named parameter, do any required setup for
4461 varargs or stdargs. We need to know about the case of this being an
4462 addressable type, in which case we skip the registers it
4463 would have arrived in.
4465 For stdargs, LAST_NAMED will be set for two parameters, the one that
4466 is actually the last named, and the dummy parameter. We only
4467 want to do this action once.
4469 Also, indicate when RTL generation is to be suppressed. */
4470 if (last_named && !varargs_setup)
4472 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4473 current_function_pretend_args_size, 0);
4474 varargs_setup = 1;
4476 #endif
4478 /* Determine parm's home in the stack,
4479 in case it arrives in the stack or we should pretend it did.
4481 Compute the stack position and rtx where the argument arrives
4482 and its size.
4484 There is one complexity here: If this was a parameter that would
4485 have been passed in registers, but wasn't only because it is
4486 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4487 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4488 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4489 0 as it was the previous time. */
4491 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4492 locate_and_pad_parm (promoted_mode, passed_type,
4493 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4494 1,
4495 #else
4496 #ifdef FUNCTION_INCOMING_ARG
4497 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4498 passed_type,
4499 pretend_named) != 0,
4500 #else
4501 FUNCTION_ARG (args_so_far, promoted_mode,
4502 passed_type,
4503 pretend_named) != 0,
4504 #endif
4505 #endif
4506 fndecl, &stack_args_size, &stack_offset, &arg_size,
4507 &alignment_pad);
4510 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4512 if (offset_rtx == const0_rtx)
4513 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4514 else
4515 stack_parm = gen_rtx_MEM (promoted_mode,
4516 gen_rtx_PLUS (Pmode,
4517 internal_arg_pointer,
4518 offset_rtx));
4520 set_mem_attributes (stack_parm, parm, 1);
4523 /* If this parameter was passed both in registers and in the stack,
4524 use the copy on the stack. */
4525 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4526 entry_parm = 0;
4528 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4529 /* If this parm was passed part in regs and part in memory,
4530 pretend it arrived entirely in memory
4531 by pushing the register-part onto the stack.
4533 In the special case of a DImode or DFmode that is split,
4534 we could put it together in a pseudoreg directly,
4535 but for now that's not worth bothering with. */
4537 if (entry_parm)
4539 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4540 passed_type, named_arg);
4542 if (nregs > 0)
4544 current_function_pretend_args_size
4545 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4546 / (PARM_BOUNDARY / BITS_PER_UNIT)
4547 * (PARM_BOUNDARY / BITS_PER_UNIT));
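/* E.g. with UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 on some
   hypothetical target, nregs == 3 yields 12 bytes, rounded up here
   to 16.  */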
4549 /* Handle calls that pass values in multiple non-contiguous
4550 locations. The Irix 6 ABI has examples of this. */
4551 if (GET_CODE (entry_parm) == PARALLEL)
4552 emit_group_store (validize_mem (stack_parm), entry_parm,
4553 int_size_in_bytes (TREE_TYPE (parm)));
4555 else
4556 move_block_from_reg (REGNO (entry_parm),
4557 validize_mem (stack_parm), nregs,
4558 int_size_in_bytes (TREE_TYPE (parm)));
4560 entry_parm = stack_parm;
4563 #endif
4565 /* If we didn't decide this parm came in a register,
4566 by default it came on the stack. */
4567 if (entry_parm == 0)
4568 entry_parm = stack_parm;
4570 /* Record permanently how this parm was passed. */
4571 DECL_INCOMING_RTL (parm) = entry_parm;
4573 /* If there is actually space on the stack for this parm,
4574 count it in stack_args_size; otherwise set stack_parm to 0
4575 to indicate there is no preallocated stack slot for the parm. */
4577 if (entry_parm == stack_parm
4578 || (GET_CODE (entry_parm) == PARALLEL
4579 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4580 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4581 /* On some machines, even if a parm value arrives in a register
4582 there is still an (uninitialized) stack slot allocated for it.
4584 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4585 whether this parameter already has a stack slot allocated,
4586 because an arg block exists only if current_function_args_size
4587 is larger than some threshold, and we haven't calculated that
4588 yet. So, for now, we just assume that stack slots never exist
4589 in this case. */
4590 || REG_PARM_STACK_SPACE (fndecl) > 0
4591 #endif
4592 )
4594 stack_args_size.constant += arg_size.constant;
4595 if (arg_size.var)
4596 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4598 else
4599 /* No stack slot was pushed for this parm. */
4600 stack_parm = 0;
4602 /* Update info on where next arg arrives in registers. */
4604 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4605 passed_type, named_arg);
4607 /* If we can't trust the parm stack slot to be aligned enough
4608 for its ultimate type, don't use that slot after entry.
4609 We'll make another stack slot, if we need one. */
4611 unsigned int thisparm_boundary
4612 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4614 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4615 stack_parm = 0;
4618 /* If parm was passed in memory, and we need to convert it on entry,
4619 don't store it back in that same slot. */
4620 if (entry_parm != 0
4621 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4622 stack_parm = 0;
4624 /* When an argument is passed in multiple locations, we can't
4625 make use of this information, but we can save some copying if
4626 the whole argument is passed in a single register. */
4627 if (GET_CODE (entry_parm) == PARALLEL
4628 && nominal_mode != BLKmode && passed_mode != BLKmode)
4630 int i, len = XVECLEN (entry_parm, 0);
4632 for (i = 0; i < len; i++)
4633 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4634 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4635 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4636 == passed_mode)
4637 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4639 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4640 DECL_INCOMING_RTL (parm) = entry_parm;
4641 break;
4645 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4646 in the mode in which it arrives.
4647 STACK_PARM is an RTX for a stack slot where the parameter can live
4648 during the function (in case we want to put it there).
4649 STACK_PARM is 0 if no stack slot was pushed for it.
4651 Now output code if necessary to convert ENTRY_PARM to
4652 the type in which this function declares it,
4653 and store that result in an appropriate place,
4654 which may be a pseudo reg, may be STACK_PARM,
4655 or may be a local stack slot if STACK_PARM is 0.
4657 Set DECL_RTL to that place. */
4659 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4661 /* If a BLKmode arrives in registers, copy it to a stack slot.
4662 Handle calls that pass values in multiple non-contiguous
4663 locations. The Irix 6 ABI has examples of this. */
4664 if (GET_CODE (entry_parm) == REG
4665 || GET_CODE (entry_parm) == PARALLEL)
4667 int size_stored
4668 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4669 UNITS_PER_WORD);
4671 /* Note that we will be storing an integral number of words.
4672 So we have to be careful to ensure that we allocate an
4673 integral number of words. We do this below in the call to
4674 assign_stack_local if space was not allocated in the argument
4675 list. If it was, this will not work if PARM_BOUNDARY is not
4676 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4677 if it becomes a problem. */
4679 if (stack_parm == 0)
4681 stack_parm
4682 = assign_stack_local (GET_MODE (entry_parm),
4683 size_stored, 0);
4684 set_mem_attributes (stack_parm, parm, 1);
4687 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4688 abort ();
4690 /* Handle calls that pass values in multiple non-contiguous
4691 locations. The Irix 6 ABI has examples of this. */
4692 if (GET_CODE (entry_parm) == PARALLEL)
4693 emit_group_store (validize_mem (stack_parm), entry_parm,
4694 int_size_in_bytes (TREE_TYPE (parm)));
4695 else
4696 move_block_from_reg (REGNO (entry_parm),
4697 validize_mem (stack_parm),
4698 size_stored / UNITS_PER_WORD,
4699 int_size_in_bytes (TREE_TYPE (parm)));
4701 SET_DECL_RTL (parm, stack_parm);
4703 else if (! ((! optimize
4704 && ! DECL_REGISTER (parm))
4705 || TREE_SIDE_EFFECTS (parm)
4706 /* If -ffloat-store specified, don't put explicit
4707 float variables into registers. */
4708 || (flag_float_store
4709 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4710 /* Always assign pseudo to structure return or item passed
4711 by invisible reference. */
4712 || passed_pointer || parm == function_result_decl)
4714 /* Store the parm in a pseudoregister during the function, but we
4715 may need to do it in a wider mode. */
4717 rtx parmreg;
4718 unsigned int regno, regnoi = 0, regnor = 0;
4720 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4722 promoted_nominal_mode
4723 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4725 parmreg = gen_reg_rtx (promoted_nominal_mode);
4726 mark_user_reg (parmreg);
4728 /* If this was an item that we received a pointer to, set DECL_RTL
4729 appropriately. */
4730 if (passed_pointer)
4732 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4733 parmreg);
4734 set_mem_attributes (x, parm, 1);
4735 SET_DECL_RTL (parm, x);
4737 else
4739 SET_DECL_RTL (parm, parmreg);
4740 maybe_set_unchanging (DECL_RTL (parm), parm);
4743 /* Copy the value into the register. */
4744 if (nominal_mode != passed_mode
4745 || promoted_nominal_mode != promoted_mode)
4747 int save_tree_used;
4748 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4749 mode, by the caller. We now have to convert it to
4750 NOMINAL_MODE, if different. However, PARMREG may be in
4751 a different mode than NOMINAL_MODE if it is being stored
4752 promoted.
4754 If ENTRY_PARM is a hard register, it might be in a register
4755 not valid for operating in its mode (e.g., an odd-numbered
4756 register for a DFmode). In that case, moves are the only
4757 thing valid, so we can't do a convert from there. This
4758 occurs when the calling sequence allows such misaligned
4759 usage.
4761 In addition, the conversion may involve a call, which could
4762 clobber parameters which haven't been copied to pseudo
4763 registers yet. Therefore, we must first copy the parm to
4764 a pseudo reg here, and save the conversion until after all
4765 parameters have been moved. */
4767 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4769 emit_move_insn (tempreg, validize_mem (entry_parm));
4771 push_to_sequence (conversion_insns);
4772 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4774 if (GET_CODE (tempreg) == SUBREG
4775 && GET_MODE (tempreg) == nominal_mode
4776 && GET_CODE (SUBREG_REG (tempreg)) == REG
4777 && nominal_mode == passed_mode
4778 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4779 && GET_MODE_SIZE (GET_MODE (tempreg))
4780 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4782 /* The argument is already sign/zero extended, so note it
4783 into the subreg. */
4784 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4785 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4788 /* TREE_USED gets set erroneously during expand_assignment. */
4789 save_tree_used = TREE_USED (parm);
4790 expand_assignment (parm,
4791 make_tree (nominal_type, tempreg), 0, 0);
4792 TREE_USED (parm) = save_tree_used;
4793 conversion_insns = get_insns ();
4794 did_conversion = 1;
4795 end_sequence ();
4797 else
4798 emit_move_insn (parmreg, validize_mem (entry_parm));
4800 /* If we were passed a pointer but the actual value
4801 can safely live in a register, put it in one. */
4802 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4803 /* If by-reference argument was promoted, demote it. */
4804 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4805 || ! ((! optimize
4806 && ! DECL_REGISTER (parm))
4807 || TREE_SIDE_EFFECTS (parm)
4808 /* If -ffloat-store specified, don't put explicit
4809 float variables into registers. */
4810 || (flag_float_store
4811 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4813 /* We can't use nominal_mode, because it will have been set to
4814 Pmode above. We must use the actual mode of the parm. */
4815 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4816 mark_user_reg (parmreg);
4817 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4819 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4820 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4821 push_to_sequence (conversion_insns);
4822 emit_move_insn (tempreg, DECL_RTL (parm));
4823 SET_DECL_RTL (parm,
4824 convert_to_mode (GET_MODE (parmreg),
4825 tempreg,
4826 unsigned_p));
4827 emit_move_insn (parmreg, DECL_RTL (parm));
4828 conversion_insns = get_insns ();
4829 did_conversion = 1;
4830 end_sequence ();
4832 else
4833 emit_move_insn (parmreg, DECL_RTL (parm));
4834 SET_DECL_RTL (parm, parmreg);
4835 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4836 now the parm. */
4837 stack_parm = 0;
4839 #ifdef FUNCTION_ARG_CALLEE_COPIES
4840 /* If we are passed an arg by reference and it is our responsibility
4841 to make a copy, do it now.
4842 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4843 original argument, so we must recreate them in the call to
4844 FUNCTION_ARG_CALLEE_COPIES. */
4845 /* ??? Later, add code to skip the copy when the argument
4846 isn't modified. */
4848 else if (passed_pointer
4849 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4850 TYPE_MODE (DECL_ARG_TYPE (parm)),
4851 DECL_ARG_TYPE (parm),
4852 named_arg)
4853 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4855 rtx copy;
4856 tree type = DECL_ARG_TYPE (parm);
4858 /* This sequence may involve a library call perhaps clobbering
4859 registers that haven't been copied to pseudos yet. */
4861 push_to_sequence (conversion_insns);
4863 if (!COMPLETE_TYPE_P (type)
4864 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4865 /* This is a variable sized object. */
4866 copy = gen_rtx_MEM (BLKmode,
4867 allocate_dynamic_stack_space
4868 (expr_size (parm), NULL_RTX,
4869 TYPE_ALIGN (type)));
4870 else
4871 copy = assign_stack_temp (TYPE_MODE (type),
4872 int_size_in_bytes (type), 1);
4873 set_mem_attributes (copy, parm, 1);
4875 store_expr (parm, copy, 0);
4876 emit_move_insn (parmreg, XEXP (copy, 0));
4877 conversion_insns = get_insns ();
4878 did_conversion = 1;
4879 end_sequence ();
4881 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4883 /* In any case, record the parm's desired stack location
4884 in case we later discover it must live in the stack.
4886 If it is a COMPLEX value, store the stack location for both
4887 halves. */
4889 if (GET_CODE (parmreg) == CONCAT)
4890 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4891 else
4892 regno = REGNO (parmreg);
4894 if (regno >= max_parm_reg)
4896 rtx *new;
4897 int old_max_parm_reg = max_parm_reg;
4899 /* It's slow to expand this one register at a time,
4900 but it's also rare and we need max_parm_reg to be
4901 precisely correct. */
4902 max_parm_reg = regno + 1;
4903 new = (rtx *) ggc_realloc (parm_reg_stack_loc,
4904 max_parm_reg * sizeof (rtx));
4905 memset ((char *) (new + old_max_parm_reg), 0,
4906 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4907 parm_reg_stack_loc = new;
4910 if (GET_CODE (parmreg) == CONCAT)
4912 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4914 regnor = REGNO (gen_realpart (submode, parmreg));
4915 regnoi = REGNO (gen_imagpart (submode, parmreg));
4917 if (stack_parm != 0)
4919 parm_reg_stack_loc[regnor]
4920 = gen_realpart (submode, stack_parm);
4921 parm_reg_stack_loc[regnoi]
4922 = gen_imagpart (submode, stack_parm);
4924 else
4926 parm_reg_stack_loc[regnor] = 0;
4927 parm_reg_stack_loc[regnoi] = 0;
4930 else
4931 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4933 /* Mark the register as eliminable if we did no conversion
4934 and it was copied from memory at a fixed offset,
4935 and the arg pointer was not copied to a pseudo-reg.
4936 If the arg pointer is a pseudo reg or the offset formed
4937 an invalid address, such memory-equivalences
4938 as we make here would screw up life analysis for it. */
4939 if (nominal_mode == passed_mode
4940 && ! did_conversion
4941 && stack_parm != 0
4942 && GET_CODE (stack_parm) == MEM
4943 && stack_offset.var == 0
4944 && reg_mentioned_p (virtual_incoming_args_rtx,
4945 XEXP (stack_parm, 0)))
4947 rtx linsn = get_last_insn ();
4948 rtx sinsn, set;
4950 /* Mark complex types separately. */
4951 if (GET_CODE (parmreg) == CONCAT)
4952 /* Scan backwards for the set of the real and
4953 imaginary parts. */
4954 for (sinsn = linsn; sinsn != 0;
4955 sinsn = prev_nonnote_insn (sinsn))
4957 set = single_set (sinsn);
4958 if (set != 0
4959 && SET_DEST (set) == regno_reg_rtx [regnoi])
4960 REG_NOTES (sinsn)
4961 = gen_rtx_EXPR_LIST (REG_EQUIV,
4962 parm_reg_stack_loc[regnoi],
4963 REG_NOTES (sinsn));
4964 else if (set != 0
4965 && SET_DEST (set) == regno_reg_rtx [regnor])
4966 REG_NOTES (sinsn)
4967 = gen_rtx_EXPR_LIST (REG_EQUIV,
4968 parm_reg_stack_loc[regnor],
4969 REG_NOTES (sinsn));
4971 else if ((set = single_set (linsn)) != 0
4972 && SET_DEST (set) == parmreg)
4973 REG_NOTES (linsn)
4974 = gen_rtx_EXPR_LIST (REG_EQUIV,
4975 stack_parm, REG_NOTES (linsn));
4978 /* For pointer data type, suggest pointer register. */
4979 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4980 mark_reg_pointer (parmreg,
4981 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4983 /* If something wants our address, try to use ADDRESSOF. */
4984 if (TREE_ADDRESSABLE (parm))
4986 /* If we end up putting something into the stack,
4987 fixup_var_refs_insns will need to make a pass over
4988 all the instructions. It looks through the pending
4989 sequences -- but it can't see the ones in the
4990 CONVERSION_INSNS, if they're not on the sequence
4991 stack. So, we go back to that sequence, just so that
4992 the fixups will happen. */
4993 push_to_sequence (conversion_insns);
4994 put_var_into_stack (parm);
4995 conversion_insns = get_insns ();
4996 end_sequence ();
4999 else
5001 /* Value must be stored in the stack slot STACK_PARM
5002 during function execution. */
5004 if (promoted_mode != nominal_mode)
5006 /* Conversion is required. */
5007 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5009 emit_move_insn (tempreg, validize_mem (entry_parm));
5011 push_to_sequence (conversion_insns);
5012 entry_parm = convert_to_mode (nominal_mode, tempreg,
5013 TREE_UNSIGNED (TREE_TYPE (parm)));
5014 if (stack_parm)
5015 /* ??? This may need a big-endian conversion on sparc64. */
5016 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5018 conversion_insns = get_insns ();
5019 did_conversion = 1;
5020 end_sequence ();
5023 if (entry_parm != stack_parm)
5025 if (stack_parm == 0)
5027 stack_parm
5028 = assign_stack_local (GET_MODE (entry_parm),
5029 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5030 set_mem_attributes (stack_parm, parm, 1);
5033 if (promoted_mode != nominal_mode)
5035 push_to_sequence (conversion_insns);
5036 emit_move_insn (validize_mem (stack_parm),
5037 validize_mem (entry_parm));
5038 conversion_insns = get_insns ();
5039 end_sequence ();
5041 else
5042 emit_move_insn (validize_mem (stack_parm),
5043 validize_mem (entry_parm));
5046 SET_DECL_RTL (parm, stack_parm);
5049 /* If this "parameter" was the place where we are receiving the
5050 function's incoming structure pointer, set up the result. */
5051 if (parm == function_result_decl)
5053 tree result = DECL_RESULT (fndecl);
5054 rtx addr = DECL_RTL (parm);
5055 rtx x;
5057 #ifdef POINTERS_EXTEND_UNSIGNED
5058 if (GET_MODE (addr) != Pmode)
5059 addr = convert_memory_address (Pmode, addr);
5060 #endif
5062 x = gen_rtx_MEM (DECL_MODE (result), addr);
5063 set_mem_attributes (x, result, 1);
5064 SET_DECL_RTL (result, x);
5067 if (GET_CODE (DECL_RTL (parm)) == REG)
5068 REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5069 else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5071 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5072 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5077 /* Output all parameter conversion instructions (possibly including calls)
5078 now that all parameters have been copied out of hard registers. */
5079 emit_insn (conversion_insns);
5081 last_parm_insn = get_last_insn ();
5083 current_function_args_size = stack_args_size.constant;
5085 /* Adjust function incoming argument size for alignment and
5086 minimum length. */
5088 #ifdef REG_PARM_STACK_SPACE
5089 #ifndef MAYBE_REG_PARM_STACK_SPACE
5090 current_function_args_size = MAX (current_function_args_size,
5091 REG_PARM_STACK_SPACE (fndecl));
5092 #endif
5093 #endif
5095 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5097 current_function_args_size
5098 = ((current_function_args_size + STACK_BYTES - 1)
5099 / STACK_BYTES) * STACK_BYTES;
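/* E.g. with STACK_BOUNDARY == 64, STACK_BYTES is 8, so an args size of
   20 bytes rounds up to 24 here.  */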
5101 #ifdef ARGS_GROW_DOWNWARD
5102 current_function_arg_offset_rtx
5103 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5104 : expand_expr (size_diffop (stack_args_size.var,
5105 size_int (-stack_args_size.constant)),
5106 NULL_RTX, VOIDmode, 0));
5107 #else
5108 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5109 #endif
5111 /* See how many bytes, if any, of its args a function should try to pop
5112 on return. */
5114 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5115 current_function_args_size);
5117 /* For a stdarg.h function, save info about
5118 regs and stack space used by the named args. */
5120 current_function_args_info = args_so_far;
5122 /* Set the rtx used for the function return value. Put this in its
5123 own variable so any optimizers that need this information don't have
5124 to include tree.h. Do this here so it gets done when an inlined
5125 function gets output. */
5127 current_function_return_rtx
5128 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5129 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5131 /* If scalar return value was computed in a pseudo-reg, or was a named
5132 return value that got dumped to the stack, copy that to the hard
5133 return register. */
5134 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5136 tree decl_result = DECL_RESULT (fndecl);
5137 rtx decl_rtl = DECL_RTL (decl_result);
5139 if (REG_P (decl_rtl)
5140 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5141 : DECL_REGISTER (decl_result))
5143 rtx real_decl_rtl;
5145 #ifdef FUNCTION_OUTGOING_VALUE
5146 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5147 fndecl);
5148 #else
5149 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5150 fndecl);
5151 #endif
5152 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5153 /* The delay slot scheduler assumes that current_function_return_rtx
5154 holds the hard register containing the return value, not a
5155 temporary pseudo. */
5156 current_function_return_rtx = real_decl_rtl;
5161 /* Indicate whether REGNO is an incoming argument to the current function
5162 that was promoted to a wider mode. If so, return the RTX for the
5163 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5164 that REGNO is promoted from and whether the promotion was signed or
5165 unsigned. */
5167 #ifdef PROMOTE_FUNCTION_ARGS
5169 rtx
5170 promoted_input_arg (regno, pmode, punsignedp)
5171 unsigned int regno;
5172 enum machine_mode *pmode;
5173 int *punsignedp;
5175 tree arg;
5177 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5178 arg = TREE_CHAIN (arg))
5179 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5180 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5181 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5183 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5184 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5186 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5187 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5188 && mode != DECL_MODE (arg))
5190 *pmode = DECL_MODE (arg);
5191 *punsignedp = unsignedp;
5192 return DECL_INCOMING_RTL (arg);
5196 return 0;
5199 #endif
5201 /* Compute the size and offset from the start of the stacked arguments for a
5202 parm passed in mode PASSED_MODE and with type TYPE.
5204 INITIAL_OFFSET_PTR points to the current offset into the stacked
5205 arguments.
5207 The starting offset and size for this parm are returned in *OFFSET_PTR
5208 and *ARG_SIZE_PTR, respectively.
5210 IN_REGS is non-zero if the argument will be passed in registers. It will
5211 never be set if REG_PARM_STACK_SPACE is not defined.
5213 FNDECL is the function in which the argument was defined.
5215 There are two types of rounding that are done. The first, controlled by
5216 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5217 list to be aligned to the specific boundary (in bits). This rounding
5218 affects the initial and starting offsets, but not the argument size.
5220 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5221 optionally rounds the size of the parm to PARM_BOUNDARY. The
5222 initial offset is not affected by this rounding, while the size always
5223 is and the starting offset may be. */
5225 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5226 initial_offset_ptr is positive because locate_and_pad_parm's
5227 callers pass in the total size of args so far as
5228 initial_offset_ptr. arg_size_ptr is always positive. */
5230 void
5231 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5232 initial_offset_ptr, offset_ptr, arg_size_ptr,
5233 alignment_pad)
5234 enum machine_mode passed_mode;
5235 tree type;
5236 int in_regs ATTRIBUTE_UNUSED;
5237 tree fndecl ATTRIBUTE_UNUSED;
5238 struct args_size *initial_offset_ptr;
5239 struct args_size *offset_ptr;
5240 struct args_size *arg_size_ptr;
5241 struct args_size *alignment_pad;
5244 tree sizetree
5245 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5246 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5247 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5249 #ifdef REG_PARM_STACK_SPACE
5250 /* If we have found a stack parm before we reach the end of the
5251 area reserved for registers, skip that area. */
5252 if (! in_regs)
5254 int reg_parm_stack_space = 0;
5256 #ifdef MAYBE_REG_PARM_STACK_SPACE
5257 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5258 #else
5259 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5260 #endif
5261 if (reg_parm_stack_space > 0)
5263 if (initial_offset_ptr->var)
5265 initial_offset_ptr->var
5266 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5267 ssize_int (reg_parm_stack_space));
5268 initial_offset_ptr->constant = 0;
5270 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5271 initial_offset_ptr->constant = reg_parm_stack_space;
5274 #endif /* REG_PARM_STACK_SPACE */
5276 arg_size_ptr->var = 0;
5277 arg_size_ptr->constant = 0;
5278 alignment_pad->var = 0;
5279 alignment_pad->constant = 0;
5281 #ifdef ARGS_GROW_DOWNWARD
5282 if (initial_offset_ptr->var)
5284 offset_ptr->constant = 0;
5285 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5286 initial_offset_ptr->var);
5288 else
5290 offset_ptr->constant = -initial_offset_ptr->constant;
5291 offset_ptr->var = 0;
5293 if (where_pad != none
5294 && (!host_integerp (sizetree, 1)
5295 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5296 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5297 SUB_PARM_SIZE (*offset_ptr, sizetree);
5298 if (where_pad != downward)
5299 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5300 if (initial_offset_ptr->var)
5301 arg_size_ptr->var = size_binop (MINUS_EXPR,
5302 size_binop (MINUS_EXPR,
5303 ssize_int (0),
5304 initial_offset_ptr->var),
5305 offset_ptr->var);
5307 else
5308 arg_size_ptr->constant = (-initial_offset_ptr->constant
5309 - offset_ptr->constant);
5311 #else /* !ARGS_GROW_DOWNWARD */
5312 if (!in_regs
5313 #ifdef REG_PARM_STACK_SPACE
5314 || REG_PARM_STACK_SPACE (fndecl) > 0
5315 #endif
5316 )
5317 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5318 *offset_ptr = *initial_offset_ptr;
5320 #ifdef PUSH_ROUNDING
5321 if (passed_mode != BLKmode)
5322 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5323 #endif
5325 /* Pad_below needs the pre-rounded size to know how much to pad below,
5326 so this must be done before rounding up. */
5327 if (where_pad == downward
5328 /* However, BLKmode args passed in regs have their padding done elsewhere.
5329 The stack slot must be able to hold the entire register. */
5330 && !(in_regs && passed_mode == BLKmode))
5331 pad_below (offset_ptr, passed_mode, sizetree);
5333 if (where_pad != none
5334 && (!host_integerp (sizetree, 1)
5335 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5336 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5338 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5339 #endif /* ARGS_GROW_DOWNWARD */
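/* Worked example, not from the original source, for a hypothetical
   target where args grow upward and PARM_BOUNDARY == STACK_BOUNDARY == 32:
   a DImode parm with FUNCTION_ARG_BOUNDARY == 64 arriving when
   *INITIAL_OFFSET_PTR is 4 is first padded up to offset 8
   (*ALIGNMENT_PAD == 4), and since 64 bits is already a multiple of
   PARM_BOUNDARY the resulting *ARG_SIZE_PTR is just 8 bytes.  */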
5342 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5343 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5345 static void
5346 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5347 struct args_size *offset_ptr;
5348 int boundary;
5349 struct args_size *alignment_pad;
5351 tree save_var = NULL_TREE;
5352 HOST_WIDE_INT save_constant = 0;
5354 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5356 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5358 save_var = offset_ptr->var;
5359 save_constant = offset_ptr->constant;
5362 alignment_pad->var = NULL_TREE;
5363 alignment_pad->constant = 0;
5365 if (boundary > BITS_PER_UNIT)
5367 if (offset_ptr->var)
5369 offset_ptr->var =
5370 #ifdef ARGS_GROW_DOWNWARD
5371 round_down
5372 #else
5373 round_up
5374 #endif
5375 (ARGS_SIZE_TREE (*offset_ptr),
5376 boundary / BITS_PER_UNIT);
5377 offset_ptr->constant = 0; /*?*/
5378 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5379 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5380 save_var);
5382 else
5384 offset_ptr->constant =
5385 #ifdef ARGS_GROW_DOWNWARD
5386 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5387 #else
5388 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5389 #endif
5390 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5391 alignment_pad->constant = offset_ptr->constant - save_constant;
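/* A minimal standalone sketch, not part of the original source, of the
   constant case above for upward-growing args; `offset' and
   `boundary_in_bytes' correspond to offset_ptr->constant and
   BOUNDARY / BITS_PER_UNIT, and the boundary is assumed to be a power
   of two, as CEIL_ROUND requires:  */
#if 0
static HOST_WIDE_INT
ceil_round_sketch (offset, boundary_in_bytes)
     HOST_WIDE_INT offset;
     int boundary_in_bytes;
{
  /* Round OFFSET up to the next multiple of BOUNDARY_IN_BYTES,
     e.g. 14 -> 16 for an 8-byte boundary.  */
  return ((offset + boundary_in_bytes - 1)
	  & ~(HOST_WIDE_INT) (boundary_in_bytes - 1));
}
#endif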
5396 #ifndef ARGS_GROW_DOWNWARD
5397 static void
5398 pad_below (offset_ptr, passed_mode, sizetree)
5399 struct args_size *offset_ptr;
5400 enum machine_mode passed_mode;
5401 tree sizetree;
5403 if (passed_mode != BLKmode)
5405 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5406 offset_ptr->constant
5407 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5408 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5409 - GET_MODE_SIZE (passed_mode));
5411 else
5413 if (TREE_CODE (sizetree) != INTEGER_CST
5414 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5416 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5417 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5418 /* Add it in. */
5419 ADD_PARM_SIZE (*offset_ptr, s2);
5420 SUB_PARM_SIZE (*offset_ptr, sizetree);
5424 #endif
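/* For instance, with PARM_BOUNDARY == 32 a HImode (2-byte) parm gains
   2 bytes of padding below: the offset advances by 2, leaving the
   value in the high-address half of its 4-byte slot.  */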
5426 /* Walk the tree of blocks describing the binding levels within a function
5427 and warn about uninitialized variables.
5428 This is done after calling flow_analysis and before global_alloc
5429 clobbers the pseudo-regs to hard regs. */
5431 void
5432 uninitialized_vars_warning (block)
5433 tree block;
5435 tree decl, sub;
5436 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5438 if (warn_uninitialized
5439 && TREE_CODE (decl) == VAR_DECL
5440 /* These warnings are unreliable for unions and aggregates
5441 because assigning the fields one by one can fail to convince
5442 flow.c that the entire aggregate was initialized.
5443 Unions are troublesome because members may be shorter. */
5444 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5445 && DECL_RTL (decl) != 0
5446 && GET_CODE (DECL_RTL (decl)) == REG
5447 /* Global optimizations can make it difficult to determine if a
5448 particular variable has been initialized. However, a VAR_DECL
5449 with a nonzero DECL_INITIAL had an initializer, so do not
5450 claim it is potentially uninitialized.
5452 We do not care about the actual value in DECL_INITIAL, so we do
5453 not worry that it may be a dangling pointer. */
5454 && DECL_INITIAL (decl) == NULL_TREE
5455 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5456 warning_with_decl (decl,
5457 "`%s' might be used uninitialized in this function");
5458 if (extra_warnings
5459 && TREE_CODE (decl) == VAR_DECL
5460 && DECL_RTL (decl) != 0
5461 && GET_CODE (DECL_RTL (decl)) == REG
5462 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5463 warning_with_decl (decl,
5464 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5466 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5467 uninitialized_vars_warning (sub);
5470 /* Do the appropriate part of uninitialized_vars_warning
5471 but for arguments instead of local variables. */
5473 void
5474 setjmp_args_warning ()
5476 tree decl;
5477 for (decl = DECL_ARGUMENTS (current_function_decl);
5478 decl; decl = TREE_CHAIN (decl))
5479 if (DECL_RTL (decl) != 0
5480 && GET_CODE (DECL_RTL (decl)) == REG
5481 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5482 warning_with_decl (decl,
5483 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5486 /* If this function calls setjmp, put all vars into the stack
5487 unless they were declared `register'. */
5489 void
5490 setjmp_protect (block)
5491 tree block;
5493 tree decl, sub;
5494 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5495 if ((TREE_CODE (decl) == VAR_DECL
5496 || TREE_CODE (decl) == PARM_DECL)
5497 && DECL_RTL (decl) != 0
5498 && (GET_CODE (DECL_RTL (decl)) == REG
5499 || (GET_CODE (DECL_RTL (decl)) == MEM
5500 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5501 /* If this variable came from an inline function, it must be
5502 that its life doesn't overlap the setjmp. If there was a
5503 setjmp in the function, it would already be in memory. We
5504 must exclude such variables because their DECL_RTL might be
5505 set to strange things such as virtual_stack_vars_rtx. */
5506 && ! DECL_FROM_INLINE (decl)
5507 && (
5508 #ifdef NON_SAVING_SETJMP
5509 /* If longjmp doesn't restore the registers,
5510 don't put anything in them. */
5511 NON_SAVING_SETJMP
5512 ||
5513 #endif
5514 ! DECL_REGISTER (decl)))
5515 put_var_into_stack (decl);
5516 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5517 setjmp_protect (sub);
5520 /* Like the previous function, but for args instead of local variables. */
5522 void
5523 setjmp_protect_args ()
5525 tree decl;
5526 for (decl = DECL_ARGUMENTS (current_function_decl);
5527 decl; decl = TREE_CHAIN (decl))
5528 if ((TREE_CODE (decl) == VAR_DECL
5529 || TREE_CODE (decl) == PARM_DECL)
5530 && DECL_RTL (decl) != 0
5531 && (GET_CODE (DECL_RTL (decl)) == REG
5532 || (GET_CODE (DECL_RTL (decl)) == MEM
5533 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5534 && (
5535 /* If longjmp doesn't restore the registers,
5536 don't put anything in them. */
5537 #ifdef NON_SAVING_SETJMP
5538 NON_SAVING_SETJMP
5539 ||
5540 #endif
5541 ! DECL_REGISTER (decl)))
5542 put_var_into_stack (decl);
5545 /* Return the context-pointer register corresponding to DECL,
5546 or 0 if it does not need one. */
5548 rtx
5549 lookup_static_chain (decl)
5550 tree decl;
5552 tree context = decl_function_context (decl);
5553 tree link;
5555 if (context == 0
5556 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5557 return 0;
5559 /* We treat inline_function_decl as an alias for the current function
5560 because that is the inline function whose vars, types, etc.
5561 are being merged into the current function.
5562 See expand_inline_function. */
5563 if (context == current_function_decl || context == inline_function_decl)
5564 return virtual_stack_vars_rtx;
5566 for (link = context_display; link; link = TREE_CHAIN (link))
5567 if (TREE_PURPOSE (link) == context)
5568 return RTL_EXPR_RTL (TREE_VALUE (link));
5570 abort ();
5573 /* Convert a stack slot address ADDR for variable VAR
5574 (from a containing function)
5575 into an address valid in this function (using a static chain). */
5577 rtx
5578 fix_lexical_addr (addr, var)
5579 rtx addr;
5580 tree var;
5582 rtx basereg;
5583 HOST_WIDE_INT displacement;
5584 tree context = decl_function_context (var);
5585 struct function *fp;
5586 rtx base = 0;
5588 /* If this is the present function, we need not do anything. */
5589 if (context == current_function_decl || context == inline_function_decl)
5590 return addr;
5592 fp = find_function_data (context);
5594 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5595 addr = XEXP (XEXP (addr, 0), 0);
5597 /* Decode given address as base reg plus displacement. */
5598 if (GET_CODE (addr) == REG)
5599 basereg = addr, displacement = 0;
5600 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5601 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5602 else
5603 abort ();
5605 /* We accept vars reached via the containing function's
5606 incoming arg pointer and via its stack variables pointer. */
5607 if (basereg == fp->internal_arg_pointer)
5609 /* If reached via arg pointer, get the arg pointer value
5610 out of that function's stack frame.
5612 There are two cases: If a separate ap is needed, allocate a
5613 slot in the outer function for it and dereference it that way.
5614 This is correct even if the real ap is actually a pseudo.
5615 Otherwise, just adjust the offset from the frame pointer to
5616 compensate. */
5618 #ifdef NEED_SEPARATE_AP
5619 rtx addr;
5621 addr = get_arg_pointer_save_area (fp);
5622 addr = fix_lexical_addr (XEXP (addr, 0), var);
5623 addr = memory_address (Pmode, addr);
5625 base = gen_rtx_MEM (Pmode, addr);
5626 set_mem_alias_set (base, get_frame_alias_set ());
5627 base = copy_to_reg (base);
5628 #else
5629 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5630 base = lookup_static_chain (var);
5631 #endif
5634 else if (basereg == virtual_stack_vars_rtx)
5636 /* This is the same code as lookup_static_chain, duplicated here to
5637 avoid an extra call to decl_function_context. */
5638 tree link;
5640 for (link = context_display; link; link = TREE_CHAIN (link))
5641 if (TREE_PURPOSE (link) == context)
5643 base = RTL_EXPR_RTL (TREE_VALUE (link));
5644 break;
5648 if (base == 0)
5649 abort ();
5651 /* Use same offset, relative to appropriate static chain or argument
5652 pointer. */
5653 return plus_constant (base, displacement);
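/* Example (illustrative): if ADDR is (plus (reg virtual-stack-vars)
   (const_int -12)) for a VAR in the containing function, BASEREG is
   virtual_stack_vars_rtx and DISPLACEMENT is -12; BASE becomes the
   containing frame's address fetched from the static chain display, and
   the result is that base plus the same -12.  */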
5656 /* Return the address of the trampoline for entering nested fn FUNCTION.
5657 If necessary, allocate a trampoline (in the stack frame)
5658 and emit rtl to initialize its contents (at entry to this function). */
5660 rtx
5661 trampoline_address (function)
5662 tree function;
5664 tree link;
5665 tree rtlexp;
5666 rtx tramp;
5667 struct function *fp;
5668 tree fn_context;
5670 /* Find an existing trampoline and return it. */
5671 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5672 if (TREE_PURPOSE (link) == function)
5673 return
5674 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5676 for (fp = outer_function_chain; fp; fp = fp->outer)
5677 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5678 if (TREE_PURPOSE (link) == function)
5680 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5681 function);
5682 return adjust_trampoline_addr (tramp);
5685 /* None exists; we must make one. */
5687 /* Find the `struct function' for the function containing FUNCTION. */
5688 fp = 0;
5689 fn_context = decl_function_context (function);
5690 if (fn_context != current_function_decl
5691 && fn_context != inline_function_decl)
5692 fp = find_function_data (fn_context);
5694 /* Allocate run-time space for this trampoline
5695 (usually in the defining function's stack frame). */
5696 #ifdef ALLOCATE_TRAMPOLINE
5697 tramp = ALLOCATE_TRAMPOLINE (fp);
5698 #else
5699 /* If rounding is needed, allocate extra space
5700 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5701 #ifdef TRAMPOLINE_ALIGNMENT
5702 #define TRAMPOLINE_REAL_SIZE \
5703 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5704 #else
5705 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5706 #endif
5707 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5708 fp ? fp : cfun);
5709 #endif
5711 /* Record the trampoline for reuse and note it for later initialization
5712 by expand_function_end. */
5713 if (fp != 0)
5715 rtlexp = make_node (RTL_EXPR);
5716 RTL_EXPR_RTL (rtlexp) = tramp;
5717 fp->x_trampoline_list = tree_cons (function, rtlexp,
5718 fp->x_trampoline_list);
5720 else
5722 /* Make the RTL_EXPR node temporary, not momentary, so that the
5723 trampoline_list doesn't become garbage. */
5724 rtlexp = make_node (RTL_EXPR);
5726 RTL_EXPR_RTL (rtlexp) = tramp;
5727 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5730 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5731 return adjust_trampoline_addr (tramp);
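/* Trampolines become necessary when the address of a nested function
   escapes.  A hypothetical GNU C example:

       extern int apply (int (*) (int));
       int f (int k)
       {
         int add_k (int x) { return x + k; }
         return apply (add_k);
       }

   Taking `add_k' as a value forces a trampoline that loads the static
   chain (giving access to `k') before jumping to the real code.  */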
5734 /* Given a trampoline address,
5735 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5737 static rtx
5738 round_trampoline_addr (tramp)
5739 rtx tramp;
5741 #ifdef TRAMPOLINE_ALIGNMENT
5742 /* Round address up to desired boundary. */
5743 rtx temp = gen_reg_rtx (Pmode);
5744 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5745 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5747 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5748 temp, 0, OPTAB_LIB_WIDEN);
5749 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5750 temp, 0, OPTAB_LIB_WIDEN);
5751 #endif
5752 return tramp;
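/* For instance, with TRAMPOLINE_ALIGNMENT == 32 bits, ADDEND is 3 and
   MASK is -4, so a TRAMP of 0x1005 is rounded by the sequence above to
   (0x1005 + 3) & -4 == 0x1008.  */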
5755 /* Given a trampoline address, round it then apply any
5756 platform-specific adjustments so that the result can be used for a
5757 function call.  */
5759 static rtx
5760 adjust_trampoline_addr (tramp)
5761 rtx tramp;
5763 tramp = round_trampoline_addr (tramp);
5764 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5765 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5766 #endif
5767 return tramp;
5770 /* Put all this function's BLOCK nodes including those that are chained
5771 onto the first block into a vector, and return it.
5772 Also store in each NOTE for the beginning or end of a block
5773 the index of that block in the vector.
5774 This function operates on the chain of top-level blocks of the current
5775 function (DECL_INITIAL (current_function_decl)) and its insn chain. */
5777 void
5778 identify_blocks ()
5780 int n_blocks;
5781 tree *block_vector, *last_block_vector;
5782 tree *block_stack;
5783 tree block = DECL_INITIAL (current_function_decl);
5785 if (block == 0)
5786 return;
5788 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5789 depth-first order. */
5790 block_vector = get_block_vector (block, &n_blocks);
5791 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5793 last_block_vector = identify_blocks_1 (get_insns (),
5794 block_vector + 1,
5795 block_vector + n_blocks,
5796 block_stack);
5798 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5799 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5800 if (0 && last_block_vector != block_vector + n_blocks)
5801 abort ();
5803 free (block_vector);
5804 free (block_stack);
5807 /* Subroutine of identify_blocks. Do the block substitution on the
5808 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5810 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5811 BLOCK_VECTOR is incremented for each block seen. */
5813 static tree *
5814 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5815 rtx insns;
5816 tree *block_vector;
5817 tree *end_block_vector;
5818 tree *orig_block_stack;
5820 rtx insn;
5821 tree *block_stack = orig_block_stack;
5823 for (insn = insns; insn; insn = NEXT_INSN (insn))
5825 if (GET_CODE (insn) == NOTE)
5827 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5829 tree b;
5831 /* If there are more block notes than BLOCKs, something
5832 is badly wrong. */
5833 if (block_vector == end_block_vector)
5834 abort ();
5836 b = *block_vector++;
5837 NOTE_BLOCK (insn) = b;
5838 *block_stack++ = b;
5840 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5842 /* If there are more NOTE_INSN_BLOCK_ENDs than
5843 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5844 if (block_stack == orig_block_stack)
5845 abort ();
5847 NOTE_BLOCK (insn) = *--block_stack;
5850 else if (GET_CODE (insn) == CALL_INSN
5851 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5853 rtx cp = PATTERN (insn);
5855 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5856 end_block_vector, block_stack);
5857 if (XEXP (cp, 1))
5858 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5859 end_block_vector, block_stack);
5860 if (XEXP (cp, 2))
5861 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5862 end_block_vector, block_stack);
5866 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5867 something is badly wrong. */
5868 if (block_stack != orig_block_stack)
5869 abort ();
5871 return block_vector;
5874 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5875 and create duplicate blocks. */
5876 /* ??? Need an option to either create block fragments or to create
5877 abstract origin duplicates of a source block. It really depends
5878 on what optimization has been performed. */
5880 void
5881 reorder_blocks ()
5883 tree block = DECL_INITIAL (current_function_decl);
5884 varray_type block_stack;
5886 if (block == NULL_TREE)
5887 return;
5889 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5891 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5892 reorder_blocks_0 (block);
5894 /* Prune the old trees away, so that they don't get in the way. */
5895 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5896 BLOCK_CHAIN (block) = NULL_TREE;
5898 /* Recreate the block tree from the note nesting. */
5899 reorder_blocks_1 (get_insns (), block, &block_stack);
5900 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5902 /* Remove deleted blocks from the block fragment chains. */
5903 reorder_fix_fragments (block);
5906 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5908 static void
5909 reorder_blocks_0 (block)
5910 tree block;
5912 while (block)
5914 TREE_ASM_WRITTEN (block) = 0;
5915 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5916 block = BLOCK_CHAIN (block);
5920 static void
5921 reorder_blocks_1 (insns, current_block, p_block_stack)
5922 rtx insns;
5923 tree current_block;
5924 varray_type *p_block_stack;
5926 rtx insn;
5928 for (insn = insns; insn; insn = NEXT_INSN (insn))
5930 if (GET_CODE (insn) == NOTE)
5932 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5934 tree block = NOTE_BLOCK (insn);
5936 /* If we have seen this block before, that means it now
5937 spans multiple address regions. Create a new fragment. */
5938 if (TREE_ASM_WRITTEN (block))
5940 tree new_block = copy_node (block);
5941 tree origin;
5943 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5944 ? BLOCK_FRAGMENT_ORIGIN (block)
5945 : block);
5946 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5947 BLOCK_FRAGMENT_CHAIN (new_block)
5948 = BLOCK_FRAGMENT_CHAIN (origin);
5949 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5951 NOTE_BLOCK (insn) = new_block;
5952 block = new_block;
5955 BLOCK_SUBBLOCKS (block) = 0;
5956 TREE_ASM_WRITTEN (block) = 1;
5957 BLOCK_SUPERCONTEXT (block) = current_block;
5958 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5959 BLOCK_SUBBLOCKS (current_block) = block;
5960 current_block = block;
5961 VARRAY_PUSH_TREE (*p_block_stack, block);
5963 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5965 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5966 VARRAY_POP (*p_block_stack);
5967 BLOCK_SUBBLOCKS (current_block)
5968 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5969 current_block = BLOCK_SUPERCONTEXT (current_block);
5972 else if (GET_CODE (insn) == CALL_INSN
5973 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5975 rtx cp = PATTERN (insn);
5976 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5977 if (XEXP (cp, 1))
5978 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5979 if (XEXP (cp, 2))
5980 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
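/* For illustration: if an optimization pass duplicated or split block B's
   region so that a NOTE_INSN_BLOCK_BEG for B is seen a second time, the
   code above turns the second occurrence into a fresh fragment B' with
   BLOCK_FRAGMENT_ORIGIN (B') == B, chained onto B's
   BLOCK_FRAGMENT_CHAIN.  */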
5985 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
5986 appears in the block tree, select one of the fragments to become
5987 the new origin block. */
5989 static void
5990 reorder_fix_fragments (block)
5991 tree block;
5993 while (block)
5995 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
5996 tree new_origin = NULL_TREE;
5998 if (dup_origin)
6000 if (! TREE_ASM_WRITTEN (dup_origin))
6002 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6004 /* Find the first of the remaining fragments. There must
6005 be at least one -- the current block. */
6006 while (! TREE_ASM_WRITTEN (new_origin))
6007 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6008 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6011 else if (! dup_origin)
6012 new_origin = block;
6014 /* Re-root the rest of the fragments to the new origin. In the
6015 case that DUP_ORIGIN was null, that means BLOCK was the origin
6016 of a chain of fragments and we want to remove those fragments
6017 that didn't make it to the output. */
6018 if (new_origin)
6020 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6021 tree chain = *pp;
6023 while (chain)
6025 if (TREE_ASM_WRITTEN (chain))
6027 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6028 *pp = chain;
6029 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6031 chain = BLOCK_FRAGMENT_CHAIN (chain);
6033 *pp = NULL_TREE;
6036 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6037 block = BLOCK_CHAIN (block);
6041 /* Reverse the order of elements in the chain T of blocks,
6042 and return the new head of the chain (old last element). */
6044 static tree
6045 blocks_nreverse (t)
6046 tree t;
6048 tree prev = 0, decl, next;
6049 for (decl = t; decl; decl = next)
6051 next = BLOCK_CHAIN (decl);
6052 BLOCK_CHAIN (decl) = prev;
6053 prev = decl;
6055 return prev;
6058 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6059 non-NULL, list them all into VECTOR, in a depth-first preorder
6060 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6061 blocks. */
6063 static int
6064 all_blocks (block, vector)
6065 tree block;
6066 tree *vector;
6068 int n_blocks = 0;
6070 while (block)
6072 TREE_ASM_WRITTEN (block) = 0;
6074 /* Record this block. */
6075 if (vector)
6076 vector[n_blocks] = block;
6078 ++n_blocks;
6080 /* Record the subblocks, and their subblocks... */
6081 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6082 vector ? vector + n_blocks : 0);
6083 block = BLOCK_CHAIN (block);
6086 return n_blocks;
6089 /* Return a vector containing all the blocks rooted at BLOCK. The
6090 number of elements in the vector is stored in N_BLOCKS_P. The
6091 vector is dynamically allocated; it is the caller's responsibility
6092 to call `free' on the pointer returned. */
6094 static tree *
6095 get_block_vector (block, n_blocks_p)
6096 tree block;
6097 int *n_blocks_p;
6099 tree *block_vector;
6101 *n_blocks_p = all_blocks (block, NULL);
6102 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6103 all_blocks (block, block_vector);
6105 return block_vector;
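/* Usage sketch, mirroring identify_blocks above:

       int n_blocks;
       tree *vec = get_block_vector (block, &n_blocks);
       ... walk vec[0] through vec[n_blocks - 1] ...
       free (vec);
*/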
6108 static int next_block_index = 2;
6110 /* Set BLOCK_NUMBER for all the blocks in FN. */
6112 void
6113 number_blocks (fn)
6114 tree fn;
6116 int i;
6117 int n_blocks;
6118 tree *block_vector;
6120 /* For SDB and XCOFF debugging output, we start numbering the blocks
6121 from 1 within each function, rather than keeping a running
6122 count. */
6123 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6124 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6125 next_block_index = 1;
6126 #endif
6128 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6130 /* The top-level BLOCK isn't numbered at all. */
6131 for (i = 1; i < n_blocks; ++i)
6132 /* We number the blocks from two. */
6133 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6135 free (block_vector);
6137 return;
6140 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6142 tree
6143 debug_find_var_in_block_tree (var, block)
6144 tree var;
6145 tree block;
6147 tree t;
6149 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6150 if (t == var)
6151 return block;
6153 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6155 tree ret = debug_find_var_in_block_tree (var, t);
6156 if (ret)
6157 return ret;
6160 return NULL_TREE;
6163 /* Allocate a function structure and reset its contents to the defaults. */
6165 static void
6166 prepare_function_start ()
6168 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6170 init_stmt_for_function ();
6171 init_eh_for_function ();
6173 cse_not_expected = ! optimize;
6175 /* Caller save not needed yet. */
6176 caller_save_needed = 0;
6178 /* No stack slots have been made yet. */
6179 stack_slot_list = 0;
6181 current_function_has_nonlocal_label = 0;
6182 current_function_has_nonlocal_goto = 0;
6184 /* There is no stack slot for handling nonlocal gotos. */
6185 nonlocal_goto_handler_slots = 0;
6186 nonlocal_goto_stack_level = 0;
6188 /* No labels have been declared for nonlocal use. */
6189 nonlocal_labels = 0;
6190 nonlocal_goto_handler_labels = 0;
6192 /* No function calls so far in this function. */
6193 function_call_count = 0;
6195 /* No parm regs have been allocated.
6196 (This is important for output_inline_function.) */
6197 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6199 /* Initialize the RTL mechanism. */
6200 init_emit ();
6202 /* Initialize the queue of pending postincrement and postdecrements,
6203 and some other info in expr.c. */
6204 init_expr ();
6206 /* We haven't done register allocation yet. */
6207 reg_renumber = 0;
6209 init_varasm_status (cfun);
6211 /* Clear out data used for inlining. */
6212 cfun->inlinable = 0;
6213 cfun->original_decl_initial = 0;
6214 cfun->original_arg_vector = 0;
6216 cfun->stack_alignment_needed = STACK_BOUNDARY;
6217 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6219 /* Set if a call to setjmp is seen. */
6220 current_function_calls_setjmp = 0;
6222 /* Set if a call to longjmp is seen. */
6223 current_function_calls_longjmp = 0;
6225 current_function_calls_alloca = 0;
6226 current_function_contains_functions = 0;
6227 current_function_is_leaf = 0;
6228 current_function_nothrow = 0;
6229 current_function_sp_is_unchanging = 0;
6230 current_function_uses_only_leaf_regs = 0;
6231 current_function_has_computed_jump = 0;
6232 current_function_is_thunk = 0;
6234 current_function_returns_pcc_struct = 0;
6235 current_function_returns_struct = 0;
6236 current_function_epilogue_delay_list = 0;
6237 current_function_uses_const_pool = 0;
6238 current_function_uses_pic_offset_table = 0;
6239 current_function_cannot_inline = 0;
6241 /* We have not yet needed to make a label to jump to for tail-recursion. */
6242 tail_recursion_label = 0;
6244 /* We haven't had a need to make a save area for ap yet. */
6245 arg_pointer_save_area = 0;
6247 /* No stack slots allocated yet. */
6248 frame_offset = 0;
6250 /* No SAVE_EXPRs in this function yet. */
6251 save_expr_regs = 0;
6253 /* No RTL_EXPRs in this function yet. */
6254 rtl_expr_chain = 0;
6256 /* Set up to allocate temporaries. */
6257 init_temp_slots ();
6259 /* Indicate that we need to distinguish between the return value of the
6260 present function and the return value of a function being called. */
6261 rtx_equal_function_value_matters = 1;
6263 /* Indicate that we have not instantiated virtual registers yet. */
6264 virtuals_instantiated = 0;
6266 /* Indicate that we want CONCATs now. */
6267 generating_concat_p = 1;
6269 /* Indicate we have no need of a frame pointer yet. */
6270 frame_pointer_needed = 0;
6272 /* By default assume not stdarg. */
6273 current_function_stdarg = 0;
6275 /* We haven't made any trampolines for this function yet. */
6276 trampoline_list = 0;
6278 init_pending_stack_adjust ();
6279 inhibit_defer_pop = 0;
6281 current_function_outgoing_args_size = 0;
6283 current_function_funcdef_no = funcdef_no++;
6285 cfun->arc_profile = profile_arc_flag || flag_test_coverage;
6289 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6291 (*lang_hooks.function.init) (cfun);
6292 if (init_machine_status)
6293 cfun->machine = (*init_machine_status) ();
6296 /* Initialize the rtl expansion mechanism so that we can do simple things
6297 like generate sequences. This is used to provide a context during global
6298 initialization of some passes. */
6299 void
6300 init_dummy_function_start ()
6302 prepare_function_start ();
6305 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6306 and initialize static variables for generating RTL for the statements
6307 of the function. */
6309 void
6310 init_function_start (subr, filename, line)
6311 tree subr;
6312 const char *filename;
6313 int line;
6315 prepare_function_start ();
6317 current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
6318 cfun->decl = subr;
6320 /* Nonzero if this is a nested function that uses a static chain. */
6322 current_function_needs_context
6323 = (decl_function_context (current_function_decl) != 0
6324 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6326 /* Within function body, compute a type's size as soon as it is laid out. */
6327 immediate_size_expand++;
6329 /* Prevent ever trying to delete the first instruction of a function.
6330 Also tell final how to output a linenum before the function prologue.
6331 Note linenums could be missing, e.g. when compiling a Java .class file. */
6332 if (line > 0)
6333 emit_line_note (filename, line);
6335 /* Make sure first insn is a note even if we don't want linenums.
6336 This makes sure the first insn will never be deleted.
6337 Also, final expects a note to appear there. */
6338 emit_note (NULL, NOTE_INSN_DELETED);
6340 /* Set flags used by final.c. */
6341 if (aggregate_value_p (DECL_RESULT (subr)))
6343 #ifdef PCC_STATIC_STRUCT_RETURN
6344 current_function_returns_pcc_struct = 1;
6345 #endif
6346 current_function_returns_struct = 1;
6349 /* Warn if this value is an aggregate type,
6350 regardless of which calling convention we are using for it. */
6351 if (warn_aggregate_return
6352 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6353 warning ("function returns an aggregate");
6355 current_function_returns_pointer
6356 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6359 /* Make sure all values used by the optimization passes have sane
6360 defaults. */
6361 void
6362 init_function_for_compilation ()
6364 reg_renumber = 0;
6366 /* No prologue/epilogue insns yet. */
6367 VARRAY_GROW (prologue, 0);
6368 VARRAY_GROW (epilogue, 0);
6369 VARRAY_GROW (sibcall_epilogue, 0);
6372 /* Expand a call to __main at the beginning of a possible main function. */
6374 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6375 #undef HAS_INIT_SECTION
6376 #define HAS_INIT_SECTION
6377 #endif
6379 void
6380 expand_main_function ()
6382 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6383 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6385 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6386 rtx tmp, seq;
6388 start_sequence ();
6389 /* Forcibly align the stack. */
6390 #ifdef STACK_GROWS_DOWNWARD
6391 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
6392 stack_pointer_rtx, 1, OPTAB_WIDEN);
6393 #else
6394 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6395 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6396 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6397 stack_pointer_rtx, 1, OPTAB_WIDEN);
6398 #endif
6399 if (tmp != stack_pointer_rtx)
6400 emit_move_insn (stack_pointer_rtx, tmp);
6402 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6403 tmp = force_reg (Pmode, const0_rtx);
6404 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6405 seq = get_insns ();
6406 end_sequence ();
6408 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6409 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6410 break;
6411 if (tmp)
6412 emit_insn_before (seq, tmp);
6413 else
6414 emit_insn (seq);
6416 #endif
6418 #ifndef HAS_INIT_SECTION
6419 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6420 VOIDmode, 0);
6421 #endif
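/* For example, with PREFERRED_STACK_BOUNDARY == 128, ALIGN is 16; the
   STACK_GROWS_DOWNWARD case above rounds the stack pointer down
   (0x1008 & -16 == 0x1000, growing the downward stack), while the other
   case rounds it up ((0x1008 + 15) & -16 == 0x1010).  */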
6424 extern struct obstack permanent_obstack;
6426 /* The PENDING_SIZES represent the sizes of variable-sized types.
6427 Create RTL for the various sizes now (using temporary variables),
6428 so that we can refer to the sizes from the RTL we are generating
6429 for the current function. The PENDING_SIZES are a TREE_LIST. The
6430 TREE_VALUE of each node is a SAVE_EXPR. */
6432 void
6433 expand_pending_sizes (pending_sizes)
6434 tree pending_sizes;
6436 tree tem;
6438 /* Evaluate now the sizes of any types declared among the arguments. */
6439 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6441 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6442 /* Flush the queue in case this parameter declaration has
6443 side-effects. */
6444 emit_queue ();
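/* A typical source of PENDING_SIZES (hypothetical example): a variably
   modified parameter type such as

       void f (int n, int a[n][n]);

   whose size expression n * n * sizeof (int) is wrapped in SAVE_EXPRs
   that must be expanded here, before RTL for the body refers to them.  */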
6448 /* Start the RTL for a new function, and set variables used for
6449 emitting RTL.
6450 SUBR is the FUNCTION_DECL node.
6451 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6452 the function's parameters, which must be run at any return statement. */
6454 void
6455 expand_function_start (subr, parms_have_cleanups)
6456 tree subr;
6457 int parms_have_cleanups;
6459 tree tem;
6460 rtx last_ptr = NULL_RTX;
6462 /* Make sure volatile mem refs aren't considered
6463 valid operands of arithmetic insns. */
6464 init_recog_no_volatile ();
6466 current_function_instrument_entry_exit
6467 = (flag_instrument_function_entry_exit
6468 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6470 current_function_profile
6471 = (profile_flag
6472 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6474 current_function_limit_stack
6475 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6477 /* If function gets a static chain arg, store it in the stack frame.
6478 Do this first, so it gets the first stack slot offset. */
6479 if (current_function_needs_context)
6481 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6483 /* Delay copying static chain if it is not a register to avoid
6484 conflicts with regs used for parameters. */
6485 if (! SMALL_REGISTER_CLASSES
6486 || GET_CODE (static_chain_incoming_rtx) == REG)
6487 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6490 /* If the parameters of this function need cleaning up, get a label
6491 for the beginning of the code which executes those cleanups. This must
6492 be done before doing anything with return_label. */
6493 if (parms_have_cleanups)
6494 cleanup_label = gen_label_rtx ();
6495 else
6496 cleanup_label = 0;
6498 /* Make the label for return statements to jump to. Do not special
6499 case machines with special return instructions -- they will be
6500 handled later during jump, ifcvt, or epilogue creation. */
6501 return_label = gen_label_rtx ();
6503 /* Initialize rtx used to return the value. */
6504 /* Do this before assign_parms so that we copy the struct value address
6505 before any library calls that assign parms might generate. */
6507 /* Decide whether to return the value in memory or in a register. */
6508 if (aggregate_value_p (DECL_RESULT (subr)))
6510 /* Returning something that won't go in a register. */
6511 rtx value_address = 0;
6513 #ifdef PCC_STATIC_STRUCT_RETURN
6514 if (current_function_returns_pcc_struct)
6516 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6517 value_address = assemble_static_space (size);
6519 else
6520 #endif
6522 /* Expect to be passed the address of a place to store the value.
6523 If it is passed as an argument, assign_parms will take care of
6524 it. */
6525 if (struct_value_incoming_rtx)
6527 value_address = gen_reg_rtx (Pmode);
6528 emit_move_insn (value_address, struct_value_incoming_rtx);
6531 if (value_address)
6533 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6534 set_mem_attributes (x, DECL_RESULT (subr), 1);
6535 SET_DECL_RTL (DECL_RESULT (subr), x);
6538 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6539 /* If return mode is void, this decl rtl should not be used. */
6540 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6541 else
6543 /* Compute the return values into a pseudo reg, which we will copy
6544 into the true return register after the cleanups are done. */
6546 /* In order to figure out what mode to use for the pseudo, we
6547 figure out what the mode of the eventual return register will
6548 actually be, and use that. */
6549 rtx hard_reg
6550 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6551 subr, 1);
6553 /* Structures that are returned in registers are not aggregate_value_p,
6554 so we may see a PARALLEL. Don't play pseudo games with this. */
6555 if (! REG_P (hard_reg))
6556 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6557 else
6559 /* Create the pseudo. */
6560 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6562 /* Needed because we may need to move this to memory
6563 in case it's a named return value whose address is taken. */
6564 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6568 /* Initialize rtx for parameters and local variables.
6569 In some cases this requires emitting insns. */
6571 assign_parms (subr);
6573 /* Copy the static chain now if it wasn't a register. The delay is to
6574 avoid conflicts with the parameter passing registers. */
6576 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6577 if (GET_CODE (static_chain_incoming_rtx) != REG)
6578 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6580 /* The following was moved from init_function_start.
6581 The move is supposed to make sdb output more accurate. */
6582 /* Indicate the beginning of the function body,
6583 as opposed to parm setup. */
6584 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6586 if (GET_CODE (get_last_insn ()) != NOTE)
6587 emit_note (NULL, NOTE_INSN_DELETED);
6588 parm_birth_insn = get_last_insn ();
6590 context_display = 0;
6591 if (current_function_needs_context)
6593 /* Fetch static chain values for containing functions. */
6594 tem = decl_function_context (current_function_decl);
6595 /* Copy the static chain pointer into a pseudo. If we have
6596 small register classes, copy the value from memory if
6597 static_chain_incoming_rtx is a REG. */
6598 if (tem)
6600 /* If the static chain originally came in a register, put it back
6601 there, then move it out in the next insn. The reason for
6602 this peculiar code is to satisfy function integration. */
6603 if (SMALL_REGISTER_CLASSES
6604 && GET_CODE (static_chain_incoming_rtx) == REG)
6605 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6606 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6609 while (tem)
6611 tree rtlexp = make_node (RTL_EXPR);
6613 RTL_EXPR_RTL (rtlexp) = last_ptr;
6614 context_display = tree_cons (tem, rtlexp, context_display);
6615 tem = decl_function_context (tem);
6616 if (tem == 0)
6617 break;
6618 /* Chain thru stack frames, assuming pointer to next lexical frame
6619 is found at the place we always store it. */
6620 #ifdef FRAME_GROWS_DOWNWARD
6621 last_ptr = plus_constant (last_ptr,
6622 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6623 #endif
6624 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6625 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6626 last_ptr = copy_to_reg (last_ptr);
6628 /* If we are not optimizing, ensure that we know that this
6629 piece of context is live over the entire function. */
6630 if (! optimize)
6631 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6632 save_expr_regs);
6636 if (current_function_instrument_entry_exit)
6638 rtx fun = DECL_RTL (current_function_decl);
6639 if (GET_CODE (fun) == MEM)
6640 fun = XEXP (fun, 0);
6641 else
6642 abort ();
6643 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6644 2, fun, Pmode,
6645 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6647 hard_frame_pointer_rtx),
6648 Pmode);
6651 if (current_function_profile)
6653 #ifdef PROFILE_HOOK
6654 PROFILE_HOOK (current_function_funcdef_no);
6655 #endif
6658 /* After the display initializations is where the tail-recursion label
6659 should go, if we end up needing one. Ensure we have a NOTE here
6660 since some things (like trampolines) get placed before this. */
6661 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6663 /* Evaluate now the sizes of any types declared among the arguments. */
6664 expand_pending_sizes (nreverse (get_pending_sizes ()));
6666 /* Make sure there is a line number after the function entry setup code. */
6667 force_next_line_note ();
6670 /* Undo the effects of init_dummy_function_start. */
6671 void
6672 expand_dummy_function_end ()
6674 /* End any sequences that failed to be closed due to syntax errors. */
6675 while (in_sequence_p ())
6676 end_sequence ();
6678 /* Outside function body, can't compute type's actual size
6679 until next function's body starts. */
6681 free_after_parsing (cfun);
6682 free_after_compilation (cfun);
6683 cfun = 0;
6686 /* Call DOIT for each hard register used as a return value from
6687 the current function. */
6689 void
6690 diddle_return_value (doit, arg)
6691 void (*doit) PARAMS ((rtx, void *));
6692 void *arg;
6694 rtx outgoing = current_function_return_rtx;
6696 if (! outgoing)
6697 return;
6699 if (GET_CODE (outgoing) == REG)
6700 (*doit) (outgoing, arg);
6701 else if (GET_CODE (outgoing) == PARALLEL)
6703 int i;
6705 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6707 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6709 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6710 (*doit) (x, arg);
6715 static void
6716 do_clobber_return_reg (reg, arg)
6717 rtx reg;
6718 void *arg ATTRIBUTE_UNUSED;
6720 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6723 void
6724 clobber_return_register ()
6726 diddle_return_value (do_clobber_return_reg, NULL);
6728 /* In case we do use pseudo to return value, clobber it too. */
6729 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6731 tree decl_result = DECL_RESULT (current_function_decl);
6732 rtx decl_rtl = DECL_RTL (decl_result);
6733 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6735 do_clobber_return_reg (decl_rtl, NULL);
6740 static void
6741 do_use_return_reg (reg, arg)
6742 rtx reg;
6743 void *arg ATTRIBUTE_UNUSED;
6745 emit_insn (gen_rtx_USE (VOIDmode, reg));
6748 void
6749 use_return_register ()
6751 diddle_return_value (do_use_return_reg, NULL);
6754 static GTY(()) rtx initial_trampoline;
6756 /* Generate RTL for the end of the current function.
6757 FILENAME and LINE are the current position in the source file.
6759 It is up to language-specific callers to do cleanups for parameters--
6760 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6762 void
6763 expand_function_end (filename, line, end_bindings)
6764 const char *filename;
6765 int line;
6766 int end_bindings;
6768 tree link;
6769 rtx clobber_after;
6771 finish_expr_for_function ();
6773 /* If arg_pointer_save_area was referenced only from a nested
6774 function, we will not have initialized it yet. Do that now. */
6775 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6776 get_arg_pointer_save_area (cfun);
6778 #ifdef NON_SAVING_SETJMP
6779 /* Don't put any variables in registers if we call setjmp
6780 on a machine that fails to restore the registers. */
6781 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6783 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6784 setjmp_protect (DECL_INITIAL (current_function_decl));
6786 setjmp_protect_args ();
6788 #endif
6790 /* Initialize any trampolines required by this function. */
6791 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6793 tree function = TREE_PURPOSE (link);
6794 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6795 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6796 #ifdef TRAMPOLINE_TEMPLATE
6797 rtx blktramp;
6798 #endif
6799 rtx seq;
6801 #ifdef TRAMPOLINE_TEMPLATE
6802 /* First make sure this compilation has a template for
6803 initializing trampolines. */
6804 if (initial_trampoline == 0)
6806 initial_trampoline
6807 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6808 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6810 #endif
6812 /* Generate insns to initialize the trampoline. */
6813 start_sequence ();
6814 tramp = round_trampoline_addr (XEXP (tramp, 0));
6815 #ifdef TRAMPOLINE_TEMPLATE
6816 blktramp = replace_equiv_address (initial_trampoline, tramp);
6817 emit_block_move (blktramp, initial_trampoline,
6818 GEN_INT (TRAMPOLINE_SIZE));
6819 #endif
6820 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6821 seq = get_insns ();
6822 end_sequence ();
6824 /* Put those insns at entry to the containing function (this one). */
6825 emit_insn_before (seq, tail_recursion_reentry);
6828 /* If we are doing stack checking and this function makes calls,
6829 do a stack probe at the start of the function to ensure we have enough
6830 space for another stack frame. */
6831 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6833 rtx insn, seq;
6835 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6836 if (GET_CODE (insn) == CALL_INSN)
6838 start_sequence ();
6839 probe_stack_range (STACK_CHECK_PROTECT,
6840 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6841 seq = get_insns ();
6842 end_sequence ();
6843 emit_insn_before (seq, tail_recursion_reentry);
6844 break;
6848 /* Warn about unused parms if extra warnings were specified. */
6849 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6850 warning. WARN_UNUSED_PARAMETER is negative when set by
6851 -Wunused. */
6852 if (warn_unused_parameter > 0
6853 || (warn_unused_parameter < 0 && extra_warnings))
6855 tree decl;
6857 for (decl = DECL_ARGUMENTS (current_function_decl);
6858 decl; decl = TREE_CHAIN (decl))
6859 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6860 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6861 warning_with_decl (decl, "unused parameter `%s'");
6864 /* Delete handlers for nonlocal gotos if nothing uses them. */
6865 if (nonlocal_goto_handler_slots != 0
6866 && ! current_function_has_nonlocal_label)
6867 delete_handlers ();
6869 /* End any sequences that failed to be closed due to syntax errors. */
6870 while (in_sequence_p ())
6871 end_sequence ();
6873 /* Outside function body, can't compute type's actual size
6874 until next function's body starts. */
6875 immediate_size_expand--;
6877 clear_pending_stack_adjust ();
6878 do_pending_stack_adjust ();
6880 /* Mark the end of the function body.
6881 If control reaches this insn, the function can drop through
6882 without returning a value. */
6883 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6885 /* Must mark the last line number note in the function, so that the test
6886 coverage code can avoid counting the last line twice. This just tells
6887 the code to ignore the immediately following line note, since there
6888 already exists a copy of this note somewhere above. This line number
6889 note is still needed for debugging though, so we can't delete it. */
6890 if (flag_test_coverage)
6891 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6893 /* Output a line number for the end of the function.
6894 SDB depends on this. */
6895 emit_line_note_force (filename, line);
6897 /* Before the return label (if any), clobber the return
6898 registers so that they are not propagated live to the rest of
6899 the function. This can only happen with functions that drop
6900 through; if there had been a return statement, there would
6901 have either been a return rtx, or a jump to the return label.
6903 We delay actual code generation after the current_function_value_rtx
6904 is computed. */
6905 clobber_after = get_last_insn ();
6907 /* Output the label for the actual return from the function,
6908 if one is expected. This happens either because a function epilogue
6909 is used instead of a return instruction, or because a return was done
6910 with a goto in order to run local cleanups, or because of pcc-style
6911 structure returning. */
6912 if (return_label)
6913 emit_label (return_label);
6915 /* C++ uses this. */
6916 if (end_bindings)
6917 expand_end_bindings (0, 0, 0);
6919 if (current_function_instrument_entry_exit)
6921 rtx fun = DECL_RTL (current_function_decl);
6922 if (GET_CODE (fun) == MEM)
6923 fun = XEXP (fun, 0);
6924 else
6925 abort ();
6926 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6927 2, fun, Pmode,
6928 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6930 hard_frame_pointer_rtx),
6931 Pmode);
6934 /* Let except.c know where it should emit the call to unregister
6935 the function context for sjlj exceptions. */
6936 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6937 sjlj_emit_function_exit_after (get_last_insn ());
6939 /* If we had calls to alloca, and this machine needs
6940 an accurate stack pointer to exit the function,
6941 insert some code to save and restore the stack pointer. */
6942 #ifdef EXIT_IGNORE_STACK
6943 if (! EXIT_IGNORE_STACK)
6944 #endif
6945 if (current_function_calls_alloca)
6947 rtx tem = 0;
6949 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6950 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6953 /* If scalar return value was computed in a pseudo-reg, or was a named
6954 return value that got dumped to the stack, copy that to the hard
6955 return register. */
6956 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6958 tree decl_result = DECL_RESULT (current_function_decl);
6959 rtx decl_rtl = DECL_RTL (decl_result);
6961 if (REG_P (decl_rtl)
6962 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6963 : DECL_REGISTER (decl_result))
6965 rtx real_decl_rtl = current_function_return_rtx;
6967 /* This should be set in assign_parms. */
6968 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
6969 abort ();
6971 /* If this is a BLKmode structure being returned in registers,
6972 then use the mode computed in expand_return. Note that if
6973 decl_rtl is memory, then its mode may have been changed,
6974 but that current_function_return_rtx has not. */
6975 if (GET_MODE (real_decl_rtl) == BLKmode)
6976 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
6978 /* If a named return value dumped decl_result to memory, then
6979 we may need to re-do the PROMOTE_MODE signed/unsigned
6980 extension. */
6981 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6983 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6985 #ifdef PROMOTE_FUNCTION_RETURN
6986 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6987 &unsignedp, 1);
6988 #endif
6990 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6992 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6993 emit_group_load (real_decl_rtl, decl_rtl,
6994 int_size_in_bytes (TREE_TYPE (decl_result)));
6995 else
6996 emit_move_insn (real_decl_rtl, decl_rtl);
7000 /* If returning a structure, arrange to return the address of the value
7001 in a place where debuggers expect to find it.
7003 If returning a structure PCC style,
7004 the caller also depends on this value.
7005 And current_function_returns_pcc_struct is not necessarily set. */
7006 if (current_function_returns_struct
7007 || current_function_returns_pcc_struct)
7009 rtx value_address
7010 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7011 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7012 #ifdef FUNCTION_OUTGOING_VALUE
7013 rtx outgoing
7014 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7015 current_function_decl);
7016 #else
7017 rtx outgoing
7018 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7019 #endif
7021 /* Mark this as a function return value so integrate will delete the
7022 assignment and USE below when inlining this function. */
7023 REG_FUNCTION_VALUE_P (outgoing) = 1;
7025 #ifdef POINTERS_EXTEND_UNSIGNED
7026 /* The address may be ptr_mode and OUTGOING may be Pmode. */
7027 if (GET_MODE (outgoing) != GET_MODE (value_address))
7028 value_address = convert_memory_address (GET_MODE (outgoing),
7029 value_address);
7030 #endif
7032 emit_move_insn (outgoing, value_address);
7034 /* Show return register used to hold result (in this case the address
7035 of the result).  */
7036 current_function_return_rtx = outgoing;
7039 /* If this is an implementation of throw, do what's necessary to
7040 communicate between __builtin_eh_return and the epilogue. */
7041 expand_eh_return ();
7043 /* Emit the actual code to clobber return register. */
7045 rtx seq, after;
7047 start_sequence ();
7048 clobber_return_register ();
7049 seq = get_insns ();
7050 end_sequence ();
7052 after = emit_insn_after (seq, clobber_after);
7054 if (clobber_after != after)
7055 cfun->x_clobber_return_insn = after;
7058 /* ??? This should no longer be necessary since stupid is no longer with
7059 us, but there are some parts of the compiler (eg reload_combine, and
7060 sh mach_dep_reorg) that still try and compute their own lifetime info
7061 instead of using the general framework. */
7062 use_return_register ();
7064 /* Fix up any gotos that jumped out to the outermost
7065 binding level of the function.
7066 Must follow emitting RETURN_LABEL. */
7068 /* If you have any cleanups to do at this point,
7069 and they need to create temporary variables,
7070 then you will lose. */
7071 expand_fixups (get_insns ());
7074 rtx
7075 get_arg_pointer_save_area (f)
7076 struct function *f;
7078 rtx ret = f->x_arg_pointer_save_area;
7080 if (! ret)
7082 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7083 f->x_arg_pointer_save_area = ret;
7086 if (f == cfun && ! f->arg_pointer_save_area_init)
7088 rtx seq;
7090 /* Save the arg pointer at the beginning of the function. The
7091 generated stack slot may not be a valid memory address, so we
7092 have to check it and fix it if necessary. */
7093 start_sequence ();
7094 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7095 seq = get_insns ();
7096 end_sequence ();
7098 push_topmost_sequence ();
7099 emit_insn_after (seq, get_insns ());
7100 pop_topmost_sequence ();
7103 return ret;
7106 /* Extend a vector that records the INSN_UIDs of INSNS
7107 (a list of one or more insns). */
7109 static void
7110 record_insns (insns, vecp)
7111 rtx insns;
7112 varray_type *vecp;
7114 int i, len;
7115 rtx tmp;
7117 tmp = insns;
7118 len = 0;
7119 while (tmp != NULL_RTX)
7121 len++;
7122 tmp = NEXT_INSN (tmp);
7125 i = VARRAY_SIZE (*vecp);
7126 VARRAY_GROW (*vecp, i + len);
7127 tmp = insns;
7128 while (tmp != NULL_RTX)
7130 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
7131 i++;
7132 tmp = NEXT_INSN (tmp);
7136 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
7137 be running after reorg, SEQUENCE rtl is possible. */
7139 static int
7140 contains (insn, vec)
7141 rtx insn;
7142 varray_type vec;
7144 int i, j;
7146 if (GET_CODE (insn) == INSN
7147 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7149 int count = 0;
7150 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7151 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7152 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7153 count++;
7154 return count;
7156 else
7158 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7159 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7160 return 1;
7162 return 0;
7165 int
7166 prologue_epilogue_contains (insn)
7167 rtx insn;
7169 if (contains (insn, prologue))
7170 return 1;
7171 if (contains (insn, epilogue))
7172 return 1;
7173 return 0;
7176 int
7177 sibcall_epilogue_contains (insn)
7178 rtx insn;
7180 if (sibcall_epilogue)
7181 return contains (insn, sibcall_epilogue);
7182 return 0;
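/* Usage sketch (the recording side is not shown in this excerpt): after
   emitting a prologue or epilogue sequence SEQ, record_insns (seq,
   &prologue) (or &epilogue, &sibcall_epilogue) stores the INSN_UIDs so
   that the two predicates above can later recognize those insns, even
   inside SEQUENCEs.  */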
7185 #ifdef HAVE_return
7186 /* Insert gen_return at the end of block BB. This also means updating
7187 block_for_insn appropriately. */
7189 static void
7190 emit_return_into_block (bb, line_note)
7191 basic_block bb;
7192 rtx line_note;
7194 rtx p, end;
7196 p = NEXT_INSN (bb->end);
7197 end = emit_jump_insn_after (gen_return (), bb->end);
7198 if (line_note)
7199 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7200 NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7202 #endif /* HAVE_return */
7204 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7206 /* These functions convert the epilogue into a variant that does not modify the
7207 stack pointer. This is used in cases where a function returns an object
7208 whose size is not known until it is computed. The called function leaves the
7209 object on the stack, leaves the stack depressed, and returns a pointer to
7210 the object.
7212 What we need to do is track all modifications and references to the stack
7213 pointer, deleting the modifications and changing the references to point to
7214 the location the stack pointer would have pointed to had the modifications
7215 taken place.
7217 These functions need to be portable so we need to make as few assumptions
7218 about the epilogue as we can. However, the epilogue basically contains
7219 three things: instructions to reset the stack pointer, instructions to
7220 reload registers, possibly including the frame pointer, and an
7221 instruction to return to the caller.
7223 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7224 We also make no attempt to validate the insns we make since if they are
7225 invalid, we probably can't do anything valid. The intent is that these
7226 routines get "smarter" as more and more machines start to use them and
7227 they try operating on different epilogues.
7229 We use the following structure to track what the part of the epilogue that
7230 we've already processed has done. We keep two copies of the SP equivalence,
7231 one for use during the insn we are processing and one for use in the next
7232 insn. The difference is because one part of a PARALLEL may adjust SP
7233 and the other may use it. */
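/* Example of the bookkeeping (illustrative): after processing
   (set sp (plus fp 16)) we have sp_equiv_reg == fp and sp_offset == 16;
   a later reference to (plus sp -4) can then be rewritten as
   (plus fp 12), with no actual modification of the stack pointer
   emitted.  */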
7235 struct epi_info
7237 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7238 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7239 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7240 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7241 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7242 should be set to once we no longer need
7243 its value. */
7246 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7247 static void emit_equiv_load PARAMS ((struct epi_info *));
7249 /* Modify INSN, a list of one or more insns that is part of the epilogue, so
7250 that it makes no modifications to the stack pointer. Return the new insns. */
7252 static rtx
7253 keep_stack_depressed (insns)
7254 rtx insns;
7256 int j;
7257 struct epi_info info;
7258 rtx insn, next;
7260 /* If the epilogue is just a single instruction, it must be OK as is. */
7262 if (NEXT_INSN (insns) == NULL_RTX)
7263 return insns;
7265 /* Otherwise, start a sequence, initialize the information we have, and
7266 process all the insns we were given. */
7267 start_sequence ();
7269 info.sp_equiv_reg = stack_pointer_rtx;
7270 info.sp_offset = 0;
7271 info.equiv_reg_src = 0;
7273 insn = insns;
7274 next = NULL_RTX;
7275 while (insn != NULL_RTX)
7277 next = NEXT_INSN (insn);
7279 if (!INSN_P (insn))
7281 add_insn (insn);
7282 insn = next;
7283 continue;
7286 /* If this insn references the register that SP is equivalent to and
7287 we have a pending load to that register, we must force out the load
7288 first and then indicate we no longer know what SP's equivalent is. */
7289 if (info.equiv_reg_src != 0
7290 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7292 emit_equiv_load (&info);
7293 info.sp_equiv_reg = 0;
7296 info.new_sp_equiv_reg = info.sp_equiv_reg;
7297 info.new_sp_offset = info.sp_offset;
7299 /* If this is a (RETURN) and the return address is on the stack,
7300 update the address and change to an indirect jump. */
7301 if (GET_CODE (PATTERN (insn)) == RETURN
7302 || (GET_CODE (PATTERN (insn)) == PARALLEL
7303 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7305 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7306 rtx base = 0;
7307 HOST_WIDE_INT offset = 0;
7308 rtx jump_insn, jump_set;
7310 /* If the return address is in a register, we can emit the insn
7311 unchanged. Otherwise, it must be a MEM and we see what the
7312 base register and offset are. In any case, we have to emit any
7313 pending load to the equivalent reg of SP, if any. */
7314 if (GET_CODE (retaddr) == REG)
7316 emit_equiv_load (&info);
7317 add_insn (insn);
7318 insn = next;
7319 continue;
7321 else if (GET_CODE (retaddr) == MEM
7322 && GET_CODE (XEXP (retaddr, 0)) == REG)
7323 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7324 else if (GET_CODE (retaddr) == MEM
7325 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7326 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7327 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7329 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7330 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7332 else
7333 abort ();
7335 /* If the base of the location containing the return pointer
7336 is SP, we must update it with the replacement address. Otherwise,
7337 just build the necessary MEM. */
7338 retaddr = plus_constant (base, offset);
7339 if (base == stack_pointer_rtx)
7340 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7341 plus_constant (info.sp_equiv_reg,
7342 info.sp_offset));
7344 retaddr = gen_rtx_MEM (Pmode, retaddr);
7346 /* If there is a pending load to the equivalent register for SP
7347 and we reference that register, we must load our address into
7348 a scratch register and then do that load. */
7349 if (info.equiv_reg_src
7350 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7352 unsigned int regno;
7353 rtx reg;
7355 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7356 if (HARD_REGNO_MODE_OK (regno, Pmode)
7357 && !fixed_regs[regno]
7358 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7359 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7360 regno)
7361 && !refers_to_regno_p (regno,
7362 regno + HARD_REGNO_NREGS (regno,
7363 Pmode),
7364 info.equiv_reg_src, NULL))
7365 break;
7367 if (regno == FIRST_PSEUDO_REGISTER)
7368 abort ();
7370 reg = gen_rtx_REG (Pmode, regno);
7371 emit_move_insn (reg, retaddr);
7372 retaddr = reg;
7375 emit_equiv_load (&info);
7376 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7378 /* Show that the SET in the above insn is a RETURN.  */
7379 jump_set = single_set (jump_insn);
7380 if (jump_set == 0)
7381 abort ();
7382 else
7383 SET_IS_RETURN_P (jump_set) = 1;
7386 /* If SP is not mentioned in the pattern and its equivalent register, if
7387 any, is not modified, just emit the insn unchanged.  Otherwise, if neither
7388 SP nor its equivalent register is set by the insn, replace each reference to
7389 SP with its current equivalent and emit the insn.  If none of those hold, handle each SET individually.  */
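/* Illustration (hypothetical values, exposition only): with SP_EQUIV_REG
   == FP and SP_OFFSET == 16, an insn that reads but does not set SP, say

	(set (reg r3) (mem (plus (reg sp) (const_int 4))))

   is rewritten by the validate_replace_rtx call below into roughly

	(set (reg r3) (mem (plus (reg fp) (const_int 20)))).  */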
7390 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7391 && (info.sp_equiv_reg == stack_pointer_rtx
7392 || !reg_set_p (info.sp_equiv_reg, insn)))
7393 add_insn (insn);
7394 else if (! reg_set_p (stack_pointer_rtx, insn)
7395 && (info.sp_equiv_reg == stack_pointer_rtx
7396 || !reg_set_p (info.sp_equiv_reg, insn)))
7398 if (! validate_replace_rtx (stack_pointer_rtx,
7399 plus_constant (info.sp_equiv_reg,
7400 info.sp_offset),
7401 insn))
7402 abort ();
7404 add_insn (insn);
7406 else if (GET_CODE (PATTERN (insn)) == SET)
7407 handle_epilogue_set (PATTERN (insn), &info);
7408 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7410 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7411 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7412 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7414 else
7415 add_insn (insn);
7417 info.sp_equiv_reg = info.new_sp_equiv_reg;
7418 info.sp_offset = info.new_sp_offset;
7420 insn = next;
7423 insns = get_insns ();
7424 end_sequence ();
7425 return insns;
7428 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
7429 structure that contains information about what we've seen so far. We
7430 process this SET by either updating that data or by emitting one or
7431 more insns. */
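/* Illustrative examples, added for exposition: a SET of SP of the form

	(set (reg sp) (plus (reg r) (const_int n)))

   records R and N in NEW_SP_EQUIV_REG and NEW_SP_OFFSET; any other source,
   e.g. (set (reg sp) (reg fp)), is recorded with a zero offset.  When R is
   SP itself, the adjustment is applied on top of the old equivalence, so
   consecutive SP increments accumulate in the offset.  */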
7433 static void
7434 handle_epilogue_set (set, p)
7435 rtx set;
7436 struct epi_info *p;
7438 /* First handle the case where we are setting SP. Record what it is being
7439 set from. If unknown, abort. */
7440 if (reg_set_p (stack_pointer_rtx, set))
7442 if (SET_DEST (set) != stack_pointer_rtx)
7443 abort ();
7445 if (GET_CODE (SET_SRC (set)) == PLUS
7446 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
7448 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
7449 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
7451 else
7452 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
7454 /* If we are adjusting SP, we adjust from the old data. */
7455 if (p->new_sp_equiv_reg == stack_pointer_rtx)
7457 p->new_sp_equiv_reg = p->sp_equiv_reg;
7458 p->new_sp_offset += p->sp_offset;
7461 if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
7462 abort ();
7464 return;
7467 /* Next handle the case where we are setting SP's equivalent register.
7468 If we already have a value to set it to, abort.  We could update, but
7469 there seems little point in handling that case.  Note that we have
7470 to allow for the case where the register was set in an earlier part
7471 of a PARALLEL within this same insn.  But use the old SP offset for
7472 any updates within this insn.  */
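/* E.g. (hypothetical registers, exposition only) a single PARALLEL might
   both set SP from R4 and reload R4:

	(parallel [(set (reg sp) (reg r4))
		   (set (reg r4) (mem (reg sp)))])

   The second SET targets the register just recorded as SP's equivalent,
   so its source, rewritten in terms of the old SP equivalence, is captured
   in EQUIV_REG_SRC and emitted by emit_equiv_load only when it can no
   longer be deferred.  */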
7473 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
7475 if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
7476 || p->equiv_reg_src != 0)
7477 abort ();
7478 else
7479 p->equiv_reg_src
7480 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7481 plus_constant (p->sp_equiv_reg,
7482 p->sp_offset));
7485 /* Otherwise, replace any references to SP in the insn with its new value
7486 and emit the insn.  */
7487 else
7489 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7490 plus_constant (p->sp_equiv_reg,
7491 p->sp_offset));
7492 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
7493 plus_constant (p->sp_equiv_reg,
7494 p->sp_offset));
7495 emit_insn (set);
7499 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
7501 static void
7502 emit_equiv_load (p)
7503 struct epi_info *p;
7505 if (p->equiv_reg_src != 0)
7506 emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
7508 p->equiv_reg_src = 0;
7510 #endif
7512 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7513 this into place with notes indicating where the prologue ends and where
7514 the epilogue begins. Update the basic block information when possible. */
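/* Sketch of the resulting insn stream for a typical function (exposition
   only; the details vary by target):

	prologue insns ...
	NOTE_INSN_PROLOGUE_END
	function body ...
	NOTE_INSN_EPILOGUE_BEG
	epilogue insns ...

   The prologue is inserted on the single edge leaving the entry block,
   the epilogue on the fallthru edge into the exit block.  */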
7516 void
7517 thread_prologue_and_epilogue_insns (f)
7518 rtx f ATTRIBUTE_UNUSED;
7520 int inserted = 0;
7521 edge e;
7522 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7523 rtx seq;
7524 #endif
7525 #ifdef HAVE_prologue
7526 rtx prologue_end = NULL_RTX;
7527 #endif
7528 #if defined (HAVE_epilogue) || defined (HAVE_return)
7529 rtx epilogue_end = NULL_RTX;
7530 #endif
7532 #ifdef HAVE_prologue
7533 if (HAVE_prologue)
7535 start_sequence ();
7536 seq = gen_prologue ();
7537 emit_insn (seq);
7539 /* Retain a map of the prologue insns. */
7540 record_insns (seq, &prologue);
7541 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7543 seq = get_insns ();
7544 end_sequence ();
7546 /* Can't deal with multiple successors of the entry block at the
7547 moment.  A function should always have at least one entry
7548 point.  */
7549 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7550 abort ();
7552 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7553 inserted = 1;
7555 #endif
7557 /* If the exit block has no non-fake predecessors, we don't need
7558 an epilogue. */
7559 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7560 if ((e->flags & EDGE_FAKE) == 0)
7561 break;
7562 if (e == NULL)
7563 goto epilogue_done;
7565 #ifdef HAVE_return
7566 if (optimize && HAVE_return)
7568 /* If we're allowed to generate a simple return instruction,
7569 then by definition we don't need a full epilogue. Examine
7570 the block that falls through to EXIT. If it does not
7571 contain any code, examine its predecessors and try to
7572 emit (conditional) return instructions. */
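/* For example (hypothetical label L, exposition only): an unconditional
   jump into the empty exit fallthru block,

	(jump_insn (set (pc) (label_ref L)))

   is deleted and a bare return insn emitted in its place, while for a
   conditional jump

	(set (pc) (if_then_else (cond) (label_ref L) (pc)))

   the (label_ref L) arm is replaced with (return) via validate_change.  */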
7574 basic_block last;
7575 edge e_next;
7576 rtx label;
7578 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7579 if (e->flags & EDGE_FALLTHRU)
7580 break;
7581 if (e == NULL)
7582 goto epilogue_done;
7583 last = e->src;
7585 /* Verify that there are no active instructions in the last block. */
7586 label = last->end;
7587 while (label && GET_CODE (label) != CODE_LABEL)
7589 if (active_insn_p (label))
7590 break;
7591 label = PREV_INSN (label);
7594 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7596 rtx epilogue_line_note = NULL_RTX;
7598 /* Locate the line number associated with the closing brace,
7599 if we can find one. */
7600 for (seq = get_last_insn ();
7601 seq && ! active_insn_p (seq);
7602 seq = PREV_INSN (seq))
7603 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7605 epilogue_line_note = seq;
7606 break;
7609 for (e = last->pred; e; e = e_next)
7611 basic_block bb = e->src;
7612 rtx jump;
7614 e_next = e->pred_next;
7615 if (bb == ENTRY_BLOCK_PTR)
7616 continue;
7618 jump = bb->end;
7619 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7620 continue;
7622 /* If we have an unconditional jump, we can replace that
7623 with a simple return instruction. */
7624 if (simplejump_p (jump))
7626 emit_return_into_block (bb, epilogue_line_note);
7627 delete_insn (jump);
7630 /* If we have a conditional jump, we can try to replace
7631 that with a conditional return instruction. */
7632 else if (condjump_p (jump))
7634 rtx ret, *loc;
7636 ret = SET_SRC (PATTERN (jump));
7637 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7638 loc = &XEXP (ret, 1);
7639 else
7640 loc = &XEXP (ret, 2);
7641 ret = gen_rtx_RETURN (VOIDmode);
7643 if (! validate_change (jump, loc, ret, 0))
7644 continue;
7645 if (JUMP_LABEL (jump))
7646 LABEL_NUSES (JUMP_LABEL (jump))--;
7648 /* If this block has only one successor, it both jumps
7649 and falls through to the fallthru block, so we can't
7650 delete the edge. */
7651 if (bb->succ->succ_next == NULL)
7652 continue;
7654 else
7655 continue;
7657 /* Fix up the CFG for the successful change we just made. */
7658 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7661 /* Emit a return insn for the exit fallthru block. Whether
7662 this is still reachable will be determined later. */
7664 emit_barrier_after (last->end);
7665 emit_return_into_block (last, epilogue_line_note);
7666 epilogue_end = last->end;
7667 last->succ->flags &= ~EDGE_FALLTHRU;
7668 goto epilogue_done;
7671 #endif
7672 #ifdef HAVE_epilogue
7673 if (HAVE_epilogue)
7675 /* Find the edge that falls through to EXIT. Other edges may exist
7676 due to RETURN instructions, but those don't need epilogues.
7677 There really shouldn't be a mixture -- either all should have
7678 been converted or none, however... */
7680 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7681 if (e->flags & EDGE_FALLTHRU)
7682 break;
7683 if (e == NULL)
7684 goto epilogue_done;
7686 start_sequence ();
7687 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7689 seq = gen_epilogue ();
7691 #ifdef INCOMING_RETURN_ADDR_RTX
7692 /* If this function returns with the stack depressed and we can support
7693 it, massage the epilogue to actually do that. */
7694 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7695 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7696 seq = keep_stack_depressed (seq);
7697 #endif
7699 emit_jump_insn (seq);
7701 /* Retain a map of the epilogue insns. */
7702 record_insns (seq, &epilogue);
7704 seq = get_insns ();
7705 end_sequence ();
7707 insert_insn_on_edge (seq, e);
7708 inserted = 1;
7710 #endif
7711 epilogue_done:
7713 if (inserted)
7714 commit_edge_insertions ();
7716 #ifdef HAVE_sibcall_epilogue
7717 /* Emit sibling epilogues before any sibling call sites. */
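/* Sketch (exposition only): for a block whose final insn is a
   (call_insn ...) with SIBLING_CALL_P set, the sequence produced by
   gen_sibcall_epilogue () is emitted immediately before that call, so
   that registers are restored before control transfers to the sibling
   function.  */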
7718 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7720 basic_block bb = e->src;
7721 rtx insn = bb->end;
7722 rtx i;
7723 rtx newinsn;
7725 if (GET_CODE (insn) != CALL_INSN
7726 || ! SIBLING_CALL_P (insn))
7727 continue;
7729 start_sequence ();
7730 emit_insn (gen_sibcall_epilogue ());
7731 seq = get_insns ();
7732 end_sequence ();
7734 /* Retain a map of the epilogue insns. Used in life analysis to
7735 avoid getting rid of sibcall epilogue insns. Do this before we
7736 actually emit the sequence. */
7737 record_insns (seq, &sibcall_epilogue);
7739 i = PREV_INSN (insn);
7740 newinsn = emit_insn_before (seq, insn);
7742 #endif
7744 #ifdef HAVE_prologue
7745 if (prologue_end)
7747 rtx insn, prev;
7749 /* GDB handles `break f' by setting a breakpoint on the first
7750 line note after the prologue. Which means (1) that if
7751 there are line number notes before where we inserted the
7752 prologue we should move them, and (2) we should generate a
7753 note before the end of the first basic block, if there isn't
7754 one already there.
7756 ??? This behaviour is completely broken when dealing with
7757 multiple-entry functions.  We simply always place the note
7758 in the first basic block and let alternate entry points
7759 be missed.  */
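/* E.g. (illustrative): a stream of

	NOTE line 10 / prologue insns / NOTE_INSN_PROLOGUE_END

   is reordered by the loop below into

	prologue insns / NOTE_INSN_PROLOGUE_END / NOTE line 10

   so that the breakpoint set by `break f' lands after the prologue.  */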
7762 for (insn = prologue_end; insn; insn = prev)
7764 prev = PREV_INSN (insn);
7765 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7767 /* Note that we cannot reorder the first insn in the
7768 chain, since rest_of_compilation relies on that
7769 remaining constant. */
7770 if (prev == NULL)
7771 break;
7772 reorder_insns (insn, insn, prologue_end);
7776 /* Find the last line number note in the first block. */
7777 for (insn = ENTRY_BLOCK_PTR->next_bb->end;
7778 insn != prologue_end && insn;
7779 insn = PREV_INSN (insn))
7780 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7781 break;
7783 /* If we didn't find one, make a copy of the first line number
7784 we run across. */
7785 if (! insn)
7787 for (insn = next_active_insn (prologue_end);
7788 insn;
7789 insn = PREV_INSN (insn))
7790 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7792 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7793 NOTE_LINE_NUMBER (insn),
7794 prologue_end);
7795 break;
7799 #endif
7800 #ifdef HAVE_epilogue
7801 if (epilogue_end)
7803 rtx insn, next;
7805 /* Similarly, move any line notes that appear after the epilogue.
7806 There is no need, however, to be quite so anal about the existence
7807 of such a note. */
7808 for (insn = epilogue_end; insn; insn = next)
7810 next = NEXT_INSN (insn);
7811 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7812 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7815 #endif
7818 /* Reposition the prologue-end and epilogue-begin notes after instruction
7819 scheduling and delayed branch scheduling. */
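/* Sketch of the problem being repaired (exposition only): after
   scheduling, insns recorded in the `prologue' map may have drifted past
   the NOTE_INSN_PROLOGUE_END note, e.g.

	NOTE_INSN_PROLOGUE_END / prologue insn / body insns ...

   The loops below move the note back to just after the last prologue
   insn, and symmetrically move NOTE_INSN_EPILOGUE_BEG to just before the
   first epilogue insn.  */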
7821 void
7822 reposition_prologue_and_epilogue_notes (f)
7823 rtx f ATTRIBUTE_UNUSED;
7825 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7826 rtx insn, last, note;
7827 int len;
7829 if ((len = VARRAY_SIZE (prologue)) > 0)
7831 last = 0, note = 0;
7833 /* Scan from the beginning until we reach the last prologue insn.
7834 We apparently can't depend on basic_block_{head,end} after
7835 reorg has run. */
7836 for (insn = f; insn; insn = NEXT_INSN (insn))
7838 if (GET_CODE (insn) == NOTE)
7840 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7841 note = insn;
7843 else if (contains (insn, prologue))
7845 last = insn;
7846 if (--len == 0)
7847 break;
7851 if (last)
7853 rtx next;
7855 /* Find the prologue-end note if we haven't already, and
7856 move it to just after the last prologue insn. */
7857 if (note == 0)
7859 for (note = last; (note = NEXT_INSN (note));)
7860 if (GET_CODE (note) == NOTE
7861 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7862 break;
7865 next = NEXT_INSN (note);
7867 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
7868 if (GET_CODE (last) == CODE_LABEL)
7869 last = NEXT_INSN (last);
7870 reorder_insns (note, note, last);
7874 if ((len = VARRAY_SIZE (epilogue)) > 0)
7876 last = 0, note = 0;
7878 /* Scan from the end until we reach the first epilogue insn.
7879 We apparently can't depend on basic_block_{head,end} after
7880 reorg has run. */
7881 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7883 if (GET_CODE (insn) == NOTE)
7885 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7886 note = insn;
7888 else if (contains (insn, epilogue))
7890 last = insn;
7891 if (--len == 0)
7892 break;
7896 if (last)
7898 /* Find the epilogue-begin note if we haven't already, and
7899 move it to just before the first epilogue insn. */
7900 if (note == 0)
7902 for (note = insn; (note = PREV_INSN (note));)
7903 if (GET_CODE (note) == NOTE
7904 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7905 break;
7908 if (PREV_INSN (last) != note)
7909 reorder_insns (note, note, PREV_INSN (last));
7912 #endif /* HAVE_prologue or HAVE_epilogue */
7915 /* Called once, at initialization, to initialize function.c. */
7917 void
7918 init_function_once ()
7920 VARRAY_INT_INIT (prologue, 0, "prologue");
7921 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7922 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7925 #include "gt-function.h"