/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
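
/* Worked example (illustrative, not part of the original source):
   with ALIGN == 8, CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16, and
   FLOOR_ROUND (-13, 8) == -13 & ~7 == -16, since two's complement
   masking rounds toward negative infinity; a plain -13 / 8 * 8 would
   have produced -8 on most machines.  */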

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */

int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) PARAMS ((void));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
					enum machine_mode, enum machine_mode,
					int, unsigned int, int,
					htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
					     enum machine_mode,
					     htab_t));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
				    htab_t));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
					  int, int, rtx));
static void fixup_var_refs_insns_with_hash
				PARAMS ((htab_t, rtx,
					 enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
					 int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
				      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
					     int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
			       tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
				       htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static hashval_t insns_for_mem_hash PARAMS ((const void *));
static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));

/* Pointer to chain of `struct function' for containing functions.  */
static GTY(()) struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  (*lang_hooks.function.enter_nested) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  (*lang_hooks.function.leave_nested) (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
                    p->fixup_var_refs_queue->promoted_mode,
                    p->fixup_var_refs_queue->unsignedp,
                    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        fixup_var_refs (queue->modified, queue->promoted_mode,
                        queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  (*lang_hooks.function.final) (f);
  f->stmt = NULL;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   a positive value specifies the alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = (*lang_hooks.types.type_for_mode) (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore any alignment request we can't honor given the preferred
     alignment of the stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;
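
  /* Illustrative numbers (an assumption for exposition, not from the
     original source): with STARTING_FRAME_OFFSET == 4 and a 16-byte
     preferred boundary, frame_off == 4 and frame_phase == 12.  For a
     slot needing 16-byte alignment, the rounding below then keeps
     x_frame_offset congruent to 12 mod 16, so that
     x_frame_offset + STARTING_FRAME_OFFSET is a multiple of 16.  */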

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset
    = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
      + frame_phase;
#else
  function->x_frame_offset
    = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
      + frame_phase;
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
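
  /* For example (illustrative): storing a 4-byte SImode value in a slot
     that was rounded up to 8 bytes gives bigend_correction == 4, so the
     MEM created below addresses the last 4 bytes of the slot, where a
     big-endian machine keeps the least significant bytes.  */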

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
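
/* Illustrative use (a sketch, not code from this file): allocate a
   word-sized slot aligned as SImode requires, e.g. for a spill:

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Passing ALIGN == 0 lets the mode choose the alignment, as described
   above assign_stack_local_1.  */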

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }

        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = TYPE_READONLY (type);
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
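
/* Illustrative use (a sketch, not code from this file): grab a scratch
   slot for the current statement only; KEEP == 0 lets the next call to
   free_temp_slots release it for reuse:

     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);  */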

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
   whose name should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error_with_decl (decl, "size of variable `%s' is too large");
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
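
/* Illustrative use (a sketch, not code from this file; EXP stands for
   some hypothetical expression tree): for a BLKmode aggregate type, or
   with MEMORY_REQUIRED == 1, this yields addressable stack memory; for
   a scalar type it simply returns a fresh, possibly mode-promoted,
   pseudo register:

     rtx t = assign_temp (TREE_TYPE (exp), 1, 0, 0);  */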

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are in effect.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
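
/* Worked example (illustrative): two free BLKmode slots recorded as
   (base_offset, full_size) == (16, 8) and (24, 8) are adjacent, since
   16 + 8 == 24; the loop above merges them into a single slot with
   base_offset 16 and full_size 16, which a later, larger request can
   then reuse.  */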

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location; if so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on the remaining operands.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
        /* If this slot is below the current TEMP_SLOT_LEVEL, then it
           needs to be preserved.  This can happen if a temporary in
           the RTL_EXPR was addressed; preserve_temp_slots will move
           the temporary into a higher level.  */
        if (temp_slot_level <= p->level)
          p->in_use = 0;
        else
          p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
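
/* Typical usage pattern (illustrative sketch, not code from this file):

     push_temp_slots ();
     tmp = assign_stack_temp (mode, size, 0);
     ... emit insns that use TMP ...
     free_temp_slots ();
     pop_temp_slots ();

   free_temp_slots releases TMP for reuse at the current level;
   pop_temp_slots then discards the nesting level pushed above.  */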

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
               || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
         ? SAVE_EXPR_RTL (decl)
         : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
                            decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fix up references to the parts only after we have fixed up
         references to the whole CONCAT, lest we do double fixups for the
         latter references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid regenerating
         already-computed alias sets.  Here we want to regenerate them.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     htab_t ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}

/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     htab_t ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}

static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     htab_t ht;
     rtx may_share;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
                                      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0, may_share);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                may_share);
          end_sequence ();
        }
    }
}

/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx may_share;
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the Normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                        may_share);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }

      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
                             may_share);

      insn = next;
    }
}

/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
     htab_t ht;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
{
  struct insns_for_mem_entry tmp;
  struct insns_for_mem_entry *ime;
  rtx insn_list;

  tmp.key = var;
  ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
                           unsignedp, 1, may_share);
}

/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */

static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx no_share;
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
          || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
              && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
                  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
        /* The REG_LIBCALL note will go away since we are going to
           turn INSN into a NOTE, so just delete the
           corresponding REG_RETVAL note.  */
        remove_note (XEXP (note, 0),
                     find_reg_note (XEXP (note, 0), REG_RETVAL,
                                    NULL_RTX));

      delete_insn (insn);
    }

  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
           && (set = single_set (insn)) != 0
           && SET_DEST (set) == var
           /* If this represents the result of an insn group,
              don't delete the insn.  */
           && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
           && (rtx_equal_p (SET_SRC (set), var)
               || (GET_CODE (SET_SRC (set)) == REG
                   && (prev = prev_nonnote_insn (insn)) != 0
                   && (prev_set = single_set (prev)) != 0
                   && SET_DEST (prev_set) == SET_SRC (set)
                   && rtx_equal_p (SET_SRC (prev_set), var))))
    {
      delete_insn (insn);
    }
  else
    {
      struct fixup_replacement *replacements = 0;
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
        {
          /* If the insn that copies the results of a CALL_INSN
             into a pseudo now references VAR, we have to use an
             intermediate pseudo since we want the life of the
             return value register to be only a single insn.

             If we don't use an intermediate pseudo, such things as
             address computations needed to make the address of VAR
             valid (if it is not) could be placed between the CALL_INSN
             and INSN.

             To make sure this doesn't happen, we record the destination
             of the CALL_INSN and see if the next insn uses both that
             and VAR.  */

          if (call_dest != 0 && GET_CODE (insn) == INSN
              && reg_mentioned_p (var, PATTERN (insn))
              && reg_mentioned_p (call_dest, PATTERN (insn)))
            {
              rtx temp = gen_reg_rtx (GET_MODE (call_dest));

              emit_insn_before (gen_move_insn (temp, call_dest), insn);

              PATTERN (insn) = replace_rtx (PATTERN (insn),
                                            call_dest, temp);
            }

          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == SET)
            call_dest = SET_DEST (PATTERN (insn));
          else if (GET_CODE (insn) == CALL_INSN
                   && GET_CODE (PATTERN (insn)) == PARALLEL
                   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
            call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
          else
            call_dest = 0;
        }

      /* See if we have to do anything to INSN now that VAR is in
         memory.  If it needs to be loaded into a pseudo, use a single
         pseudo for the entire insn in case there is a MATCH_DUP
         between two operands.  We pass a pointer to the head of
         a list of struct fixup_replacements.  If fixup_var_refs_1
         needs to allocate pseudos or replacement MEMs (for SUBREGs),
         it will record them in this list.

         If it allocated a pseudo for any replacement, we copy into
         it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                        &replacements, no_share);

      /* If this is last_parm_insn, and any instructions were output
         after it to fix it up, then we must set last_parm_insn to
         the last such instruction emitted.  */
      if (insn == last_parm_insn)
        last_parm_insn = PREV_INSN (next_insn);

      while (replacements)
        {
          struct fixup_replacement *next;

          if (GET_CODE (replacements->new) == REG)
            {
              rtx insert_before;
              rtx seq;

              /* OLD might be a (subreg (mem)).  */
              if (GET_CODE (replacements->old) == SUBREG)
                replacements->old
                  = fixup_memory_subreg (replacements->old, insn,
                                         promoted_mode, 0);
              else
                replacements->old
                  = fixup_stack_1 (replacements->old, insn);

              insert_before = insn;

              /* If we are changing the mode, do a conversion.
                 This might be wasteful, but combine.c will
                 eliminate much of the waste.  */

              if (GET_MODE (replacements->new)
                  != GET_MODE (replacements->old))
                {
                  start_sequence ();
                  convert_move (replacements->new,
                                replacements->old, unsignedp);
                  seq = get_insns ();
                  end_sequence ();
                }
              else
                seq = gen_move_insn (replacements->new,
                                     replacements->old);

              emit_insn_before (seq, insert_before);
            }

          next = replacements->next;
          free (replacements);
          replacements = next;
        }
    }

  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
     But don't touch other insns referred to by reg-notes;
     we will get them elsewhere.  */
  while (note)
    {
      if (GET_CODE (note) != INSN_LIST)
        XEXP (note, 0)
          = walk_fixup_memory_subreg (XEXP (note, 0), insn,
                                      promoted_mode, 1);
      note = XEXP (note, 1);
    }
}
1897 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1898 See if the rtx expression at *LOC in INSN needs to be changed.
1900 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1901 contain a list of original rtx's and replacements. If we find that we need
1902 to modify this insn by replacing a memory reference with a pseudo or by
1903 making a new MEM to implement a SUBREG, we consult that list to see if
1904 we have already chosen a replacement. If none has already been allocated,
1905 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1906 or the SUBREG, as appropriate, to the pseudo. */
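/* A hypothetical caller-side sketch, mirroring what fixup_var_refs_insn
   does above, of how the REPLACEMENTS list threads through this routine:

	struct fixup_replacement *replacements = 0;

	fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
			  &replacements, no_share);
	while (replacements)
	  {
	    if (GET_CODE (replacements->new) == REG)
	      emit_insn_before (gen_move_insn (replacements->new,
					       replacements->old), insn);
	    replacements = replacements->next;
	  }

   Allocating at most one replacement per distinct rtx is what lets
   MATCH_DUP operands end up with the same pseudo.  */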
1908 static void
1909 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1910 rtx var;
1911 enum machine_mode promoted_mode;
1912 rtx *loc;
1913 rtx insn;
1914 struct fixup_replacement **replacements;
1915 rtx no_share;
1917 int i;
1918 rtx x = *loc;
1919 RTX_CODE code = GET_CODE (x);
1920 const char *fmt;
1921 rtx tem, tem1;
1922 struct fixup_replacement *replacement;
1924 switch (code)
1926 case ADDRESSOF:
1927 if (XEXP (x, 0) == var)
1929 /* Prevent sharing of rtl that might lose. */
1930 rtx sub = copy_rtx (XEXP (var, 0));
1932 if (! validate_change (insn, loc, sub, 0))
1934 rtx y = gen_reg_rtx (GET_MODE (sub));
1935 rtx seq, new_insn;
1937 /* We should be able to replace with a register or all is lost.
1938 Note that we can't use validate_change to verify this, since
1939 we are not trying to replace all duplicates simultaneously. */
1940 if (! validate_replace_rtx (*loc, y, insn))
1941 abort ();
1943 /* Careful! First try to recognize a direct move of the
1944 value, mimicking how things are done in gen_reload wrt
1945 PLUS. Consider what happens when insn is a conditional
1946 move instruction and addsi3 clobbers flags. */
1948 start_sequence ();
1949 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1950 seq = get_insns ();
1951 end_sequence ();
1953 if (recog_memoized (new_insn) < 0)
1955 /* That failed. Fall back on force_operand and hope. */
1957 start_sequence ();
1958 sub = force_operand (sub, y);
1959 if (sub != y)
1960 emit_insn (gen_move_insn (y, sub));
1961 seq = get_insns ();
1962 end_sequence ();
1965 #ifdef HAVE_cc0
1966 /* Don't separate setter from user. */
1967 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1968 insn = PREV_INSN (insn);
1969 #endif
1971 emit_insn_before (seq, insn);
1974 return;
1976 case MEM:
1977 if (var == x)
1979 /* If we already have a replacement, use it. Otherwise,
1980 try to fix up this address in case it is invalid. */
1982 replacement = find_fixup_replacement (replacements, var);
1983 if (replacement->new)
1985 *loc = replacement->new;
1986 return;
1989 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1991 /* Unless we are forcing memory to register or we changed the mode,
1992 we can leave things the way they are if the insn is valid. */
1994 INSN_CODE (insn) = -1;
1995 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1996 && recog_memoized (insn) >= 0)
1997 return;
1999 *loc = replacement->new = gen_reg_rtx (promoted_mode);
2000 return;
2003 /* If X contains VAR, we need to unshare it here so that we update
2004 each occurrence separately. But all identical MEMs in one insn
2005 must be replaced with the same rtx because of the possibility of
2006 MATCH_DUPs. */
2008 if (reg_mentioned_p (var, x))
2010 replacement = find_fixup_replacement (replacements, x);
2011 if (replacement->new == 0)
2012 replacement->new = copy_most_rtx (x, no_share);
2014 *loc = x = replacement->new;
2015 code = GET_CODE (x);
2017 break;
2019 case REG:
2020 case CC0:
2021 case PC:
2022 case CONST_INT:
2023 case CONST:
2024 case SYMBOL_REF:
2025 case LABEL_REF:
2026 case CONST_DOUBLE:
2027 case CONST_VECTOR:
2028 return;
2030 case SIGN_EXTRACT:
2031 case ZERO_EXTRACT:
2032 /* Note that in some cases those types of expressions are altered
2033 by optimize_bit_field, and do not survive to get here. */
2034 if (XEXP (x, 0) == var
2035 || (GET_CODE (XEXP (x, 0)) == SUBREG
2036 && SUBREG_REG (XEXP (x, 0)) == var))
2038 /* Get TEM as a valid MEM in the mode presently in the insn.
2040 We don't worry about the possibility of MATCH_DUP here; it
2041 is highly unlikely and would be tricky to handle. */
2043 tem = XEXP (x, 0);
2044 if (GET_CODE (tem) == SUBREG)
2046 if (GET_MODE_BITSIZE (GET_MODE (tem))
2047 > GET_MODE_BITSIZE (GET_MODE (var)))
2049 replacement = find_fixup_replacement (replacements, var);
2050 if (replacement->new == 0)
2051 replacement->new = gen_reg_rtx (GET_MODE (var));
2052 SUBREG_REG (tem) = replacement->new;
2054 /* The following code works only if we have a MEM, so we
2055 need to handle the subreg here. We directly substitute
2056 it assuming that a subreg must be OK here. We already
2057 scheduled a replacement to copy the mem into the
2058 subreg. */
2059 XEXP (x, 0) = tem;
2060 return;
2062 else
2063 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2065 else
2066 tem = fixup_stack_1 (tem, insn);
2068 /* Unless we want to load from memory, get TEM into the proper mode
2069 for an extract from memory. This can only be done if the
2070 extract is at a constant position and length. */
2072 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2073 && GET_CODE (XEXP (x, 2)) == CONST_INT
2074 && ! mode_dependent_address_p (XEXP (tem, 0))
2075 && ! MEM_VOLATILE_P (tem))
2077 enum machine_mode wanted_mode = VOIDmode;
2078 enum machine_mode is_mode = GET_MODE (tem);
2079 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2081 if (GET_CODE (x) == ZERO_EXTRACT)
2083 enum machine_mode new_mode
2084 = mode_for_extraction (EP_extzv, 1);
2085 if (new_mode != MAX_MACHINE_MODE)
2086 wanted_mode = new_mode;
2088 else if (GET_CODE (x) == SIGN_EXTRACT)
2090 enum machine_mode new_mode
2091 = mode_for_extraction (EP_extv, 1);
2092 if (new_mode != MAX_MACHINE_MODE)
2093 wanted_mode = new_mode;
2096 /* If we have a narrower mode, we can do something. */
2097 if (wanted_mode != VOIDmode
2098 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2100 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2101 rtx old_pos = XEXP (x, 2);
2102 rtx newmem;
2104 /* If the bytes and bits are counted differently, we
2105 must adjust the offset. */
2106 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2107 offset = (GET_MODE_SIZE (is_mode)
2108 - GET_MODE_SIZE (wanted_mode) - offset);
2110 pos %= GET_MODE_BITSIZE (wanted_mode);
2112 newmem = adjust_address_nv (tem, wanted_mode, offset);
2114 /* Make the change and see if the insn remains valid. */
2115 INSN_CODE (insn) = -1;
2116 XEXP (x, 0) = newmem;
2117 XEXP (x, 2) = GEN_INT (pos);
2119 if (recog_memoized (insn) >= 0)
2120 return;
2122 /* Otherwise, restore old position. XEXP (x, 0) will be
2123 restored later. */
2124 XEXP (x, 2) = old_pos;
2128 /* If we get here, the bitfield extract insn can't accept a memory
2129 reference. Copy the input into a register. */
2131 tem1 = gen_reg_rtx (GET_MODE (tem));
2132 emit_insn_before (gen_move_insn (tem1, tem), insn);
2133 XEXP (x, 0) = tem1;
2134 return;
2136 break;
2138 case SUBREG:
2139 if (SUBREG_REG (x) == var)
2141 /* If this is a special SUBREG made because VAR was promoted
2142 from a wider mode, replace it with VAR and call ourselves
2143 recursively, this time saying that the object previously
2144 had its current mode (by virtue of the SUBREG). */
2146 if (SUBREG_PROMOTED_VAR_P (x))
2148 *loc = var;
2149 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2150 no_share);
2151 return;
2154 /* If this SUBREG makes VAR wider, it has become a paradoxical
2155 SUBREG with VAR in memory, but these aren't allowed at this
2156 stage of the compilation. So load VAR into a pseudo and take
2157 a SUBREG of that pseudo. */
2158 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2160 replacement = find_fixup_replacement (replacements, var);
2161 if (replacement->new == 0)
2162 replacement->new = gen_reg_rtx (promoted_mode);
2163 SUBREG_REG (x) = replacement->new;
2164 return;
2167 /* See if we have already found a replacement for this SUBREG.
2168 If so, use it. Otherwise, make a MEM and see if the insn
2169 is recognized. If not, or if we should force MEM into a register,
2170 make a pseudo for this SUBREG. */
2171 replacement = find_fixup_replacement (replacements, x);
2172 if (replacement->new)
2174 *loc = replacement->new;
2175 return;
2178 replacement->new = *loc = fixup_memory_subreg (x, insn,
2179 promoted_mode, 0);
2181 INSN_CODE (insn) = -1;
2182 if (! flag_force_mem && recog_memoized (insn) >= 0)
2183 return;
2185 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2186 return;
2188 break;
2190 case SET:
2191 /* First do special simplification of bit-field references. */
2192 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2193 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2194 optimize_bit_field (x, insn, 0);
2195 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2196 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2197 optimize_bit_field (x, insn, 0);
2199 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2200 into a register and then store it back out. */
2201 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2202 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2203 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2204 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2205 > GET_MODE_SIZE (GET_MODE (var))))
2207 replacement = find_fixup_replacement (replacements, var);
2208 if (replacement->new == 0)
2209 replacement->new = gen_reg_rtx (GET_MODE (var));
2211 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2212 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2215 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2216 insn into a pseudo and store the low part of the pseudo into VAR. */
2217 if (GET_CODE (SET_DEST (x)) == SUBREG
2218 && SUBREG_REG (SET_DEST (x)) == var
2219 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2220 > GET_MODE_SIZE (GET_MODE (var))))
2222 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2223 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2224 tem)),
2225 insn);
2226 break;
2230 rtx dest = SET_DEST (x);
2231 rtx src = SET_SRC (x);
2232 rtx outerdest = dest;
2234 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2235 || GET_CODE (dest) == SIGN_EXTRACT
2236 || GET_CODE (dest) == ZERO_EXTRACT)
2237 dest = XEXP (dest, 0);
2239 if (GET_CODE (src) == SUBREG)
2240 src = SUBREG_REG (src);
2242 /* If VAR does not appear at the top level of the SET
2243 just scan the lower levels of the tree. */
2245 if (src != var && dest != var)
2246 break;
2248 /* We will need to rerecognize this insn. */
2249 INSN_CODE (insn) = -1;
2251 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2252 && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2254 /* Since this case will return, ensure we fixup all the
2255 operands here. */
2256 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2257 insn, replacements, no_share);
2258 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2259 insn, replacements, no_share);
2260 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2261 insn, replacements, no_share);
2263 tem = XEXP (outerdest, 0);
2265 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2266 that may appear inside a ZERO_EXTRACT.
2267 This was legitimate when the MEM was a REG. */
2268 if (GET_CODE (tem) == SUBREG
2269 && SUBREG_REG (tem) == var)
2270 tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2271 else
2272 tem = fixup_stack_1 (tem, insn);
2274 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2275 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2276 && ! mode_dependent_address_p (XEXP (tem, 0))
2277 && ! MEM_VOLATILE_P (tem))
2279 enum machine_mode wanted_mode;
2280 enum machine_mode is_mode = GET_MODE (tem);
2281 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2283 wanted_mode = mode_for_extraction (EP_insv, 0);
2285 /* If we have a narrower mode, we can do something. */
2286 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2288 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2289 rtx old_pos = XEXP (outerdest, 2);
2290 rtx newmem;
2292 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2293 offset = (GET_MODE_SIZE (is_mode)
2294 - GET_MODE_SIZE (wanted_mode) - offset);
2296 pos %= GET_MODE_BITSIZE (wanted_mode);
2298 newmem = adjust_address_nv (tem, wanted_mode, offset);
2300 /* Make the change and see if the insn remains valid. */
2301 INSN_CODE (insn) = -1;
2302 XEXP (outerdest, 0) = newmem;
2303 XEXP (outerdest, 2) = GEN_INT (pos);
2305 if (recog_memoized (insn) >= 0)
2306 return;
2308 /* Otherwise, restore old position. XEXP (x, 0) will be
2309 restored later. */
2310 XEXP (outerdest, 2) = old_pos;
2314 /* If we get here, the bit-field store doesn't allow memory
2315 or isn't located at a constant position. Load the value into
2316 a register, do the store, and put it back into memory. */
2318 tem1 = gen_reg_rtx (GET_MODE (tem));
2319 emit_insn_before (gen_move_insn (tem1, tem), insn);
2320 emit_insn_after (gen_move_insn (tem, tem1), insn);
2321 XEXP (outerdest, 0) = tem1;
2322 return;
2325 /* STRICT_LOW_PART is a no-op on memory references
2326 and it can cause combinations to be unrecognizable,
2327 so eliminate it. */
2329 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2330 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2332 /* A valid insn to copy VAR into or out of a register
2333 must be left alone, to avoid an infinite loop here.
2334 If the reference to VAR is by a subreg, fix that up,
2335 since SUBREG is not valid for a memref.
2336 Also fix up the address of the stack slot.
2338 Note that we must not try to recognize the insn until
2339 after we know that we have valid addresses and no
2340 (subreg (mem ...) ...) constructs, since these interfere
2341 with determining the validity of the insn. */
2343 if ((SET_SRC (x) == var
2344 || (GET_CODE (SET_SRC (x)) == SUBREG
2345 && SUBREG_REG (SET_SRC (x)) == var))
2346 && (GET_CODE (SET_DEST (x)) == REG
2347 || (GET_CODE (SET_DEST (x)) == SUBREG
2348 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2349 && GET_MODE (var) == promoted_mode
2350 && x == single_set (insn))
2352 rtx pat, last;
2354 if (GET_CODE (SET_SRC (x)) == SUBREG
2355 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2356 > GET_MODE_SIZE (GET_MODE (var))))
2358 /* This (subreg VAR) is now a paradoxical subreg. We need
2359 to replace VAR instead of the subreg. */
2360 replacement = find_fixup_replacement (replacements, var);
2361 if (replacement->new == NULL_RTX)
2362 replacement->new = gen_reg_rtx (GET_MODE (var));
2363 SUBREG_REG (SET_SRC (x)) = replacement->new;
2365 else
2367 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2368 if (replacement->new)
2369 SET_SRC (x) = replacement->new;
2370 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2371 SET_SRC (x) = replacement->new
2372 = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2373 0);
2374 else
2375 SET_SRC (x) = replacement->new
2376 = fixup_stack_1 (SET_SRC (x), insn);
2379 if (recog_memoized (insn) >= 0)
2380 return;
2382 /* INSN is not valid, but we know that we want to
2383 copy SET_SRC (x) to SET_DEST (x) in some way. So
2384 we generate the move and see whether it requires more
2385 than one insn. If it does, we emit those insns and
2386 delete INSN. Otherwise, we can just replace the pattern
2387 of INSN; we have already verified above that INSN has
2388 no function other than to do X. */
2390 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2391 if (NEXT_INSN (pat) != NULL_RTX)
2393 last = emit_insn_before (pat, insn);
2395 /* INSN might have REG_RETVAL or other important notes, so
2396 we need to store the pattern of the last insn in the
2397 sequence into INSN similarly to the normal case. LAST
2398 should not have REG_NOTES, but we allow them if INSN has
2399 no REG_NOTES. */
2400 if (REG_NOTES (last) && REG_NOTES (insn))
2401 abort ();
2402 if (REG_NOTES (last))
2403 REG_NOTES (insn) = REG_NOTES (last);
2404 PATTERN (insn) = PATTERN (last);
2406 delete_insn (last);
2408 else
2409 PATTERN (insn) = PATTERN (pat);
2411 return;
2414 if ((SET_DEST (x) == var
2415 || (GET_CODE (SET_DEST (x)) == SUBREG
2416 && SUBREG_REG (SET_DEST (x)) == var))
2417 && (GET_CODE (SET_SRC (x)) == REG
2418 || (GET_CODE (SET_SRC (x)) == SUBREG
2419 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2420 && GET_MODE (var) == promoted_mode
2421 && x == single_set (insn))
2423 rtx pat, last;
2425 if (GET_CODE (SET_DEST (x)) == SUBREG)
2426 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2427 promoted_mode, 0);
2428 else
2429 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2431 if (recog_memoized (insn) >= 0)
2432 return;
2434 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2435 if (NEXT_INSN (pat) != NULL_RTX)
2437 last = emit_insn_before (pat, insn);
2439 /* INSN might have REG_RETVAL or other important notes, so
2440 we need to store the pattern of the last insn in the
2441 sequence into INSN similarly to the normal case. LAST
2442 should not have REG_NOTES, but we allow them if INSN has
2443 no REG_NOTES. */
2444 if (REG_NOTES (last) && REG_NOTES (insn))
2445 abort ();
2446 if (REG_NOTES (last))
2447 REG_NOTES (insn) = REG_NOTES (last);
2448 PATTERN (insn) = PATTERN (last);
2450 delete_insn (last);
2452 else
2453 PATTERN (insn) = PATTERN (pat);
2455 return;
2458 /* Otherwise, storing into VAR must be handled specially
2459 by storing into a temporary and copying that into VAR
2460 with a new insn after this one. Note that this case
2461 will be used when storing into a promoted scalar since
2462 the insn will now have different modes on the input
2463 and output and hence will be invalid (except for the case
2464 of setting it to a constant, which does not need any
2465 change if it is valid). We generate extra code in that case,
2466 but combine.c will eliminate it. */
2468 if (dest == var)
2470 rtx temp;
2471 rtx fixeddest = SET_DEST (x);
2472 enum machine_mode temp_mode;
2474 /* STRICT_LOW_PART can be discarded around a MEM. */
2475 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2476 fixeddest = XEXP (fixeddest, 0);
2477 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2478 if (GET_CODE (fixeddest) == SUBREG)
2480 fixeddest = fixup_memory_subreg (fixeddest, insn,
2481 promoted_mode, 0);
2482 temp_mode = GET_MODE (fixeddest);
2484 else
2486 fixeddest = fixup_stack_1 (fixeddest, insn);
2487 temp_mode = promoted_mode;
2490 temp = gen_reg_rtx (temp_mode);
2492 emit_insn_after (gen_move_insn (fixeddest,
2493 gen_lowpart (GET_MODE (fixeddest),
2494 temp)),
2495 insn);
2497 SET_DEST (x) = temp;
2501 default:
2502 break;
2505 /* Nothing special about this RTX; fix its operands. */
2507 fmt = GET_RTX_FORMAT (code);
2508 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2510 if (fmt[i] == 'e')
2511 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2512 no_share);
2513 else if (fmt[i] == 'E')
2515 int j;
2516 for (j = 0; j < XVECLEN (x, i); j++)
2517 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2518 insn, replacements, no_share);
2523 /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2524 The REG was placed on the stack, so X now has the form (SUBREG:m1
2525 (MEM:m2 ...)).
2527 Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
2528 must be emitted to compute NEWADDR, put them before INSN.
2530 UNCRITICAL nonzero means accept paradoxical subregs.
2531 This is used for subregs found inside REG_NOTES. */
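/* A worked example, purely illustrative: on a little-endian target,
   if X is (subreg:SI (mem:DI (reg fp)) 4), then MODE is SImode and
   SUBREG_BYTE (x) is 4, so the result is

	(mem:SI (plus (reg fp) (const_int 4)))

   On a big-endian target where PROMOTED_MODE is wider than the mode
   of the MEM, the offset is first reduced by the size difference so
   that it still addresses the same bytes of the object.  */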
2533 static rtx
2534 fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2535 rtx x;
2536 rtx insn;
2537 enum machine_mode promoted_mode;
2538 int uncritical;
2540 int offset;
2541 rtx mem = SUBREG_REG (x);
2542 rtx addr = XEXP (mem, 0);
2543 enum machine_mode mode = GET_MODE (x);
2544 rtx result, seq;
2546 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2547 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2548 abort ();
2550 offset = SUBREG_BYTE (x);
2551 if (BYTES_BIG_ENDIAN)
2552 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2553 the offset so that it points to the right location within the
2554 MEM. */
2555 offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2557 if (!flag_force_addr
2558 && memory_address_p (mode, plus_constant (addr, offset)))
2559 /* Shortcut if no insns need be emitted. */
2560 return adjust_address (mem, mode, offset);
2562 start_sequence ();
2563 result = adjust_address (mem, mode, offset);
2564 seq = get_insns ();
2565 end_sequence ();
2567 emit_insn_before (seq, insn);
2568 return result;
2571 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2572 Replace subexpressions of X in place.
2573 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2574 Otherwise return X, with its contents possibly altered.
2576 INSN, PROMOTED_MODE and UNCRITICAL are as for
2577 fixup_memory_subreg. */
2579 static rtx
2580 walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2581 rtx x;
2582 rtx insn;
2583 enum machine_mode promoted_mode;
2584 int uncritical;
2586 enum rtx_code code;
2587 const char *fmt;
2588 int i;
2590 if (x == 0)
2591 return 0;
2593 code = GET_CODE (x);
2595 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2596 return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2598 /* Nothing special about this RTX; fix its operands. */
2600 fmt = GET_RTX_FORMAT (code);
2601 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2603 if (fmt[i] == 'e')
2604 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2605 promoted_mode, uncritical);
2606 else if (fmt[i] == 'E')
2608 int j;
2609 for (j = 0; j < XVECLEN (x, i); j++)
2610 XVECEXP (x, i, j)
2611 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2612 promoted_mode, uncritical);
2615 return x;
2618 /* For each memory ref within X, if it refers to a stack slot
2619 with an out of range displacement, put the address in a temp register
2620 (emitting new insns before INSN to load these registers)
2621 and alter the memory ref to use that register.
2622 Replace each such MEM rtx with a copy, to avoid clobberage. */
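/* For instance (hypothetical target constraints): if a target's
   load/store instructions accept only small displacements, a reference
   such as

	(mem:SI (plus (reg fp) (const_int 40000)))

   is rewritten by first computing the address into a temporary,

	(set (reg:SI 80) (plus:SI (reg fp) (const_int 40000)))

   and then using (mem:SI (reg:SI 80)) in place of the original MEM.  */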
2624 static rtx
2625 fixup_stack_1 (x, insn)
2626 rtx x;
2627 rtx insn;
2629 int i;
2630 RTX_CODE code = GET_CODE (x);
2631 const char *fmt;
2633 if (code == MEM)
2635 rtx ad = XEXP (x, 0);
2636 /* If we have address of a stack slot but it's not valid
2637 (displacement is too large), compute the sum in a register. */
2638 if (GET_CODE (ad) == PLUS
2639 && GET_CODE (XEXP (ad, 0)) == REG
2640 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2641 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2642 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2643 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2644 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2645 #endif
2646 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2647 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2648 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2649 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2651 rtx temp, seq;
2652 if (memory_address_p (GET_MODE (x), ad))
2653 return x;
2655 start_sequence ();
2656 temp = copy_to_reg (ad);
2657 seq = get_insns ();
2658 end_sequence ();
2659 emit_insn_before (seq, insn);
2660 return replace_equiv_address (x, temp);
2662 return x;
2665 fmt = GET_RTX_FORMAT (code);
2666 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2668 if (fmt[i] == 'e')
2669 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2670 else if (fmt[i] == 'E')
2672 int j;
2673 for (j = 0; j < XVECLEN (x, i); j++)
2674 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2677 return x;
2680 /* Optimization: a bit-field instruction whose field
2681 happens to be a byte or halfword in memory
2682 can be changed to a move instruction.
2684 We call here when INSN is an insn to examine or store into a bit-field.
2685 BODY is the SET-rtx to be altered.
2687 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2688 (Currently this is called only from function.c, and EQUIV_MEM
2689 is always 0.) */
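/* A sketch of the transformation, with hypothetical operands and
   assuming little-endian bit numbering: the aligned 8-bit store

	(set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
	     (reg:SI 90))

   can become the plain byte store

	(set (mem:QI (plus ADDR (const_int 1))) (subreg:QI (reg:SI 90) 0))

   The endianness adjustments below compute the right byte offset for
   the other bit/byte ordering combinations.  */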
2691 static void
2692 optimize_bit_field (body, insn, equiv_mem)
2693 rtx body;
2694 rtx insn;
2695 rtx *equiv_mem;
2697 rtx bitfield;
2698 int destflag;
2699 rtx seq = 0;
2700 enum machine_mode mode;
2702 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2703 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2704 bitfield = SET_DEST (body), destflag = 1;
2705 else
2706 bitfield = SET_SRC (body), destflag = 0;
2708 /* First check that the field being stored has constant size and position
2709 and is in fact a byte or halfword suitably aligned. */
2711 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2712 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2713 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2714 != BLKmode)
2715 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2717 rtx memref = 0;
2719 /* Now check that the containing word is memory, not a register,
2720 and that it is safe to change the machine mode. */
2722 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2723 memref = XEXP (bitfield, 0);
2724 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2725 && equiv_mem != 0)
2726 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2727 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2728 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2729 memref = SUBREG_REG (XEXP (bitfield, 0));
2730 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2731 && equiv_mem != 0
2732 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2733 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2735 if (memref
2736 && ! mode_dependent_address_p (XEXP (memref, 0))
2737 && ! MEM_VOLATILE_P (memref))
2739 /* Now adjust the address, first for any subreg'ing
2740 that we are now getting rid of,
2741 and then for which byte of the word is wanted. */
2743 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2744 rtx insns;
2746 /* Adjust OFFSET to count bits from low-address byte. */
2747 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2748 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2749 - offset - INTVAL (XEXP (bitfield, 1)));
2751 /* Adjust OFFSET to count bytes from low-address byte. */
2752 offset /= BITS_PER_UNIT;
2753 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2755 offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2756 / UNITS_PER_WORD) * UNITS_PER_WORD;
2757 if (BYTES_BIG_ENDIAN)
2758 offset -= (MIN (UNITS_PER_WORD,
2759 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2760 - MIN (UNITS_PER_WORD,
2761 GET_MODE_SIZE (GET_MODE (memref))));
2764 start_sequence ();
2765 memref = adjust_address (memref, mode, offset);
2766 insns = get_insns ();
2767 end_sequence ();
2768 emit_insn_before (insns, insn);
2770 /* Store this memory reference where
2771 we found the bit field reference. */
2773 if (destflag)
2775 validate_change (insn, &SET_DEST (body), memref, 1);
2776 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2778 rtx src = SET_SRC (body);
2779 while (GET_CODE (src) == SUBREG
2780 && SUBREG_BYTE (src) == 0)
2781 src = SUBREG_REG (src);
2782 if (GET_MODE (src) != GET_MODE (memref))
2783 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2784 validate_change (insn, &SET_SRC (body), src, 1);
2786 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2787 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2788 /* This shouldn't happen because anything that didn't have
2789 one of these modes should have got converted explicitly
2790 and then referenced through a subreg.
2791 This is so because the original bit-field was
2792 handled by agg_mode and so its tree structure had
2793 the same mode that memref now has. */
2794 abort ();
2796 else
2798 rtx dest = SET_DEST (body);
2800 while (GET_CODE (dest) == SUBREG
2801 && SUBREG_BYTE (dest) == 0
2802 && (GET_MODE_CLASS (GET_MODE (dest))
2803 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2804 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2805 <= UNITS_PER_WORD))
2806 dest = SUBREG_REG (dest);
2808 validate_change (insn, &SET_DEST (body), dest, 1);
2810 if (GET_MODE (dest) == GET_MODE (memref))
2811 validate_change (insn, &SET_SRC (body), memref, 1);
2812 else
2814 /* Convert the mem ref to the destination mode. */
2815 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2817 start_sequence ();
2818 convert_move (newreg, memref,
2819 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2820 seq = get_insns ();
2821 end_sequence ();
2823 validate_change (insn, &SET_SRC (body), newreg, 1);
2827 /* See if we can convert this extraction or insertion into
2828 a simple move insn. We might not be able to do so if this
2829 was, for example, part of a PARALLEL.
2831 If we succeed, write out any needed conversions. If we fail,
2832 it is hard to guess why we failed, so don't do anything
2833 special; just let the optimization be suppressed. */
2835 if (apply_change_group () && seq)
2836 emit_insn_before (seq, insn);
2841 /* These routines are responsible for converting virtual register references
2842 to the actual hard register references once RTL generation is complete.
2844 The following four variables are used for communication between the
2845 routines. They contain the offsets of the virtual registers from their
2846 respective hard registers. */
2848 static int in_arg_offset;
2849 static int var_offset;
2850 static int dynamic_offset;
2851 static int out_arg_offset;
2852 static int cfa_offset;
2854 /* In most machines, the stack pointer register is equivalent to the bottom
2855 of the stack. */
2857 #ifndef STACK_POINTER_OFFSET
2858 #define STACK_POINTER_OFFSET 0
2859 #endif
2861 /* If not defined, pick an appropriate default for the offset of dynamically
2862 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2863 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2865 #ifndef STACK_DYNAMIC_OFFSET
2867 /* The bottom of the stack points to the actual arguments. If
2868 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2869 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2870 stack space for register parameters is not pushed by the caller, but is
2871 rather part of the fixed stack areas and hence not included in
2872 `current_function_outgoing_args_size'. Nevertheless, we must allow
2873 for it when allocating stack dynamic objects. */
2875 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2876 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2877 ((ACCUMULATE_OUTGOING_ARGS \
2878 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2879 + (STACK_POINTER_OFFSET))
2881 #else
2882 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2883 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2884 + (STACK_POINTER_OFFSET))
2885 #endif
2886 #endif
2888 /* On most machines, the CFA coincides with the first incoming parm. */
2890 #ifndef ARG_POINTER_CFA_OFFSET
2891 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2892 #endif
2894 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had its
2895 address taken. DECL is the decl or SAVE_EXPR for the object stored in the
2896 register, for later use if we do need to force REG into the stack. REG is
2897 overwritten by the MEM, as in put_reg_into_stack. */
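/* For example (illustrative; on a target where Pmode is SImode and the
   pseudo happens to be number 60): passing (reg:SI 60) produces

	(mem:SI (addressof:SI (reg:SI 61) 60 DECL))

   where (reg:SI 61) is a fresh pseudo remembered in case pseudo 60 must
   later be forced into a real stack slot.  Until then the ADDRESSOF
   simply stands for "the address of pseudo 60".  */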
2899 rtx
2900 gen_mem_addressof (reg, decl)
2901 rtx reg;
2902 tree decl;
2904 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2905 REGNO (reg), decl);
2907 /* Calculate this before we start messing with decl's RTL. */
2908 HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2910 /* If the original REG was a user-variable, then so is the REG whose
2911 address is being taken. Likewise for unchanging. */
2912 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2913 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2915 PUT_CODE (reg, MEM);
2916 MEM_ATTRS (reg) = 0;
2917 XEXP (reg, 0) = r;
2919 if (decl)
2921 tree type = TREE_TYPE (decl);
2922 enum machine_mode decl_mode
2923 = (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2924 rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2925 : DECL_RTL_IF_SET (decl));
2927 PUT_MODE (reg, decl_mode);
2929 /* Clear DECL_RTL momentarily so functions below will work
2930 properly, then set it again. */
2931 if (DECL_P (decl) && decl_rtl == reg)
2932 SET_DECL_RTL (decl, 0);
2934 set_mem_attributes (reg, decl, 1);
2935 set_mem_alias_set (reg, set);
2937 if (DECL_P (decl) && decl_rtl == reg)
2938 SET_DECL_RTL (decl, reg);
2940 if (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0))
2941 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2943 else
2944 fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2946 return reg;
2949 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2951 void
2952 flush_addressof (decl)
2953 tree decl;
2955 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2956 && DECL_RTL (decl) != 0
2957 && GET_CODE (DECL_RTL (decl)) == MEM
2958 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2959 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2960 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2963 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2965 static void
2966 put_addressof_into_stack (r, ht)
2967 rtx r;
2968 htab_t ht;
2970 tree decl, type;
2971 int volatile_p, used_p;
2973 rtx reg = XEXP (r, 0);
2975 if (GET_CODE (reg) != REG)
2976 abort ();
2978 decl = ADDRESSOF_DECL (r);
2979 if (decl)
2981 type = TREE_TYPE (decl);
2982 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2983 && TREE_THIS_VOLATILE (decl));
2984 used_p = (TREE_USED (decl)
2985 || (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2987 else
2989 type = NULL_TREE;
2990 volatile_p = 0;
2991 used_p = 1;
2994 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2995 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2998 /* List of replacements made below in purge_addressof_1 when creating
2999 bitfield insertions. */
3000 static rtx purge_bitfield_addressof_replacements;
3002 /* List of replacements made below in purge_addressof_1 for patterns
3003 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3004 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
3005 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3006 enough in complex cases, e.g. when some field values can be
3007 extracted by using a MEM with a narrower mode. */
3008 static rtx purge_addressof_replacements;
3010 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3011 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3012 the stack. If the function returns FALSE then the replacement could not
3013 be made. */
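/* The common easy case, as an illustrative sketch: a use such as

	(set (reg:SI 70) (mem:SI (addressof:SI (reg:SI 61) 60 DECL)))

   where the object never needed a memory home simply collapses to

	(set (reg:SI 70) (reg:SI 61))

   i.e. the register inside the ADDRESSOF replaces the whole MEM via
   the validate_change calls below; only mode mismatches and bit-field
   accesses require the heavier machinery that follows.  */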
3015 static bool
3016 purge_addressof_1 (loc, insn, force, store, ht)
3017 rtx *loc;
3018 rtx insn;
3019 int force, store;
3020 htab_t ht;
3022 rtx x;
3023 RTX_CODE code;
3024 int i, j;
3025 const char *fmt;
3026 bool result = true;
3028 /* Re-start here to avoid recursion in common cases. */
3029 restart:
3031 x = *loc;
3032 if (x == 0)
3033 return true;
3035 code = GET_CODE (x);
3037 /* If we don't return in any of the cases below, we will recurse inside
3038 the RTX, which will normally result in any ADDRESSOF being forced into
3039 memory. */
3040 if (code == SET)
3042 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3043 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3044 return result;
3046 else if (code == ADDRESSOF)
3048 rtx sub, insns;
3050 if (GET_CODE (XEXP (x, 0)) != MEM)
3052 put_addressof_into_stack (x, ht);
3053 return true;
3056 /* We must create a copy of the rtx because it was created by
3057 overwriting a REG rtx which is always shared. */
3058 sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3059 if (validate_change (insn, loc, sub, 0)
3060 || validate_replace_rtx (x, sub, insn))
3061 return true;
3063 start_sequence ();
3064 sub = force_operand (sub, NULL_RTX);
3065 if (! validate_change (insn, loc, sub, 0)
3066 && ! validate_replace_rtx (x, sub, insn))
3067 abort ();
3069 insns = get_insns ();
3070 end_sequence ();
3071 emit_insn_before (insns, insn);
3072 return true;
3075 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3077 rtx sub = XEXP (XEXP (x, 0), 0);
3079 if (GET_CODE (sub) == MEM)
3080 sub = adjust_address_nv (sub, GET_MODE (x), 0);
3081 else if (GET_CODE (sub) == REG
3082 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3083 ;
3084 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3086 int size_x, size_sub;
3088 if (!insn)
3090 /* When processing REG_NOTES look at the list of
3091 replacements done on the insn to find the register that X
3092 was replaced by. */
3093 rtx tem;
3095 for (tem = purge_bitfield_addressof_replacements;
3096 tem != NULL_RTX;
3097 tem = XEXP (XEXP (tem, 1), 1))
3098 if (rtx_equal_p (x, XEXP (tem, 0)))
3100 *loc = XEXP (XEXP (tem, 1), 0);
3101 return true;
3104 /* See comment for purge_addressof_replacements. */
3105 for (tem = purge_addressof_replacements;
3106 tem != NULL_RTX;
3107 tem = XEXP (XEXP (tem, 1), 1))
3108 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3110 rtx z = XEXP (XEXP (tem, 1), 0);
3112 if (GET_MODE (x) == GET_MODE (z)
3113 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3114 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3115 abort ();
3117 /* It can happen that the note may speak of things
3118 in a wider (or just different) mode than the
3119 code did. This is especially true of
3120 REG_RETVAL. */
3122 if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3123 z = SUBREG_REG (z);
3125 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3126 && (GET_MODE_SIZE (GET_MODE (x))
3127 > GET_MODE_SIZE (GET_MODE (z))))
3129 /* This can occur as a result of invalid
3130 pointer casts, e.g. float f; ...
3131 *(long long int *)&f.
3132 ??? We could emit a warning here, but
3133 without a line number that wouldn't be
3134 very helpful. */
3135 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3137 else
3138 z = gen_lowpart (GET_MODE (x), z);
3140 *loc = z;
3141 return true;
3144 /* Sometimes we may not be able to find the replacement. For
3145 example, when the original insn was a MEM in a wider mode,
3146 and the note is part of a sign extension of a narrowed
3147 version of that MEM. GCC testcase compile/990829-1.c can
3148 generate an example of this situation. Rather than complain,
3149 we return false, which will prompt our caller to remove the
3150 offending note. */
3151 return false;
3154 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3155 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3157 /* Don't even consider working with paradoxical subregs,
3158 or the moral equivalent seen here. */
3159 if (size_x <= size_sub
3160 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3162 /* Do a bitfield insertion to mirror what would happen
3163 in memory. */
3165 rtx val, seq;
3167 if (store)
3169 rtx p = PREV_INSN (insn);
3171 start_sequence ();
3172 val = gen_reg_rtx (GET_MODE (x));
3173 if (! validate_change (insn, loc, val, 0))
3175 /* Discard the current sequence and put the
3176 ADDRESSOF on stack. */
3177 end_sequence ();
3178 goto give_up;
3180 seq = get_insns ();
3181 end_sequence ();
3182 emit_insn_before (seq, insn);
3183 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3184 insn, ht);
3186 start_sequence ();
3187 store_bit_field (sub, size_x, 0, GET_MODE (x),
3188 val, GET_MODE_SIZE (GET_MODE (sub)));
3190 /* Make sure to unshare any shared rtl that store_bit_field
3191 might have created. */
3192 unshare_all_rtl_again (get_insns ());
3194 seq = get_insns ();
3195 end_sequence ();
3196 p = emit_insn_after (seq, insn);
3197 if (NEXT_INSN (insn))
3198 compute_insns_for_mem (NEXT_INSN (insn),
3199 p ? NEXT_INSN (p) : NULL_RTX,
3200 ht);
3202 else
3204 rtx p = PREV_INSN (insn);
3206 start_sequence ();
3207 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3208 GET_MODE (x), GET_MODE (x),
3209 GET_MODE_SIZE (GET_MODE (sub)));
3211 if (! validate_change (insn, loc, val, 0))
3213 /* Discard the current sequence and put the
3214 ADDRESSOF on stack. */
3215 end_sequence ();
3216 goto give_up;
3219 seq = get_insns ();
3220 end_sequence ();
3221 emit_insn_before (seq, insn);
3222 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3223 insn, ht);
3226 /* Remember the replacement so that the same one can be done
3227 on the REG_NOTES. */
3228 purge_bitfield_addressof_replacements
3229 = gen_rtx_EXPR_LIST (VOIDmode, x,
3230 gen_rtx_EXPR_LIST
3231 (VOIDmode, val,
3232 purge_bitfield_addressof_replacements));
3234 /* We replaced with a reg -- all done. */
3235 return true;
3239 else if (validate_change (insn, loc, sub, 0))
3241 /* Remember the replacement so that the same one can be done
3242 on the REG_NOTES. */
3243 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3245 rtx tem;
3247 for (tem = purge_addressof_replacements;
3248 tem != NULL_RTX;
3249 tem = XEXP (XEXP (tem, 1), 1))
3250 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3252 XEXP (XEXP (tem, 1), 0) = sub;
3253 return true;
3255 purge_addressof_replacements
3256 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3257 gen_rtx_EXPR_LIST (VOIDmode, sub,
3258 purge_addressof_replacements));
3259 return true;
3261 goto restart;
3265 give_up:
3266 /* Scan all subexpressions. */
3267 fmt = GET_RTX_FORMAT (code);
3268 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3270 if (*fmt == 'e')
3271 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3272 else if (*fmt == 'E')
3273 for (j = 0; j < XVECLEN (x, i); j++)
3274 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3277 return result;
3280 /* Return a hash value for K, a REG. */
3282 static hashval_t
3283 insns_for_mem_hash (k)
3284 const void * k;
3286 /* Use the address of the key for the hash value. */
3287 struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3288 return (hashval_t) m->key;
3291 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3293 static int
3294 insns_for_mem_comp (k1, k2)
3295 const void * k1;
3296 const void * k2;
3298 struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3299 struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3300 return m1->key == m2->key;
3303 struct insns_for_mem_walk_info
3305 /* The hash table that we are using to record which INSNs use which
3306 MEMs. */
3307 htab_t ht;
3309 /* The INSN we are currently processing. */
3310 rtx insn;
3312 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3313 to find the insns that use the REGs in the ADDRESSOFs. */
3314 int pass;
3317 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3318 that might be used in an ADDRESSOF expression, record this INSN in
3319 the hash table given by DATA (which is really a pointer to an
3320 insns_for_mem_walk_info structure). */
3322 static int
3323 insns_for_mem_walk (r, data)
3324 rtx *r;
3325 void *data;
3327 struct insns_for_mem_walk_info *ifmwi
3328 = (struct insns_for_mem_walk_info *) data;
3329 struct insns_for_mem_entry tmp;
3330 tmp.insns = NULL_RTX;
3332 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3333 && GET_CODE (XEXP (*r, 0)) == REG)
3335 PTR *e;
3336 tmp.key = XEXP (*r, 0);
3337 e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3338 if (*e == NULL)
3340 *e = ggc_alloc (sizeof (tmp));
3341 memcpy (*e, &tmp, sizeof (tmp));
3344 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3346 struct insns_for_mem_entry *ifme;
3347 tmp.key = *r;
3348 ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3350 /* If we have not already recorded this INSN, do so now. Since
3351 we process the INSNs in order, we know that if we have
3352 recorded it, it must be at the front of the list. */
3353 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3354 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3355 ifme->insns);
3358 return 0;
3361 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3362 which REGs in HT. */
3364 static void
3365 compute_insns_for_mem (insns, last_insn, ht)
3366 rtx insns;
3367 rtx last_insn;
3368 htab_t ht;
3370 rtx insn;
3371 struct insns_for_mem_walk_info ifmwi;
3372 ifmwi.ht = ht;
3374 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3375 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3376 if (INSN_P (insn))
3378 ifmwi.insn = insn;
3379 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3383 /* Helper function for purge_addressof called through for_each_rtx.
3384 Returns true iff the rtl is an ADDRESSOF. */
3386 static int
3387 is_addressof (rtl, data)
3388 rtx *rtl;
3389 void *data ATTRIBUTE_UNUSED;
3391 return GET_CODE (*rtl) == ADDRESSOF;
3394 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3395 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3396 stack. */
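/* A minimal usage sketch, assuming the insn chain comes from
   get_insns () once RTL generation is complete:

	purge_addressof (get_insns ());

   which is roughly how the pass driver invokes this entry point;
   afterwards no ADDRESSOF rtx should remain in the stream.  */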
3398 void
3399 purge_addressof (insns)
3400 rtx insns;
3402 rtx insn;
3403 htab_t ht;
3405 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3406 requires a fixup pass over the instruction stream to correct
3407 INSNs that depended on the REG being a REG, and not a MEM. But,
3408 these fixup passes are slow. Furthermore, most MEMs are not
3409 mentioned in very many instructions. So, we speed up the process
3410 by pre-calculating which REGs occur in which INSNs; that allows
3411 us to perform the fixup passes much more quickly. */
3412 ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3413 compute_insns_for_mem (insns, NULL_RTX, ht);
3415 for (insn = insns; insn; insn = NEXT_INSN (insn))
3416 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3417 || GET_CODE (insn) == CALL_INSN)
3419 if (! purge_addressof_1 (&PATTERN (insn), insn,
3420 asm_noperands (PATTERN (insn)) > 0, 0, ht))
3421 /* If we could not replace the ADDRESSOFs in the insn,
3422 something is wrong. */
3423 abort ();
3425 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, ht))
3427 /* If we could not replace the ADDRESSOFs in the insn's notes,
3428 we can just remove the offending notes instead. */
3429 rtx note;
3431 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3433 /* If we find a REG_RETVAL note then the insn is a libcall.
3434 Such insns must have REG_EQUAL notes as well, in order
3435 for later passes of the compiler to work. So it is not
3436 safe to delete the notes here, and instead we abort. */
3437 if (REG_NOTE_KIND (note) == REG_RETVAL)
3438 abort ();
3439 if (for_each_rtx (&note, is_addressof, NULL))
3440 remove_note (insn, note);
3445 /* Clean up. */
3446 purge_bitfield_addressof_replacements = 0;
3447 purge_addressof_replacements = 0;
3449 /* REGs are shared. purge_addressof will destructively replace a REG
3450 with a MEM, which creates shared MEMs.
3452 Unfortunately, the children of put_reg_into_stack assume that MEMs
3453 referring to the same stack slot are shared (fixup_var_refs and
3454 the associated hash table code).
3456 So, we have to do another unsharing pass after we have flushed any
3457 REGs that had their address taken into the stack.
3459 It may be worth tracking whether or not we converted any REGs into
3460 MEMs to avoid this overhead when it is not needed. */
3461 unshare_all_rtl_again (get_insns ());
3464 /* Convert a SET of a hard subreg to a set of the appropriate hard
3465 register. A subroutine of purge_hard_subreg_sets. */
3467 static void
3468 purge_single_hard_subreg_set (pattern)
3469 rtx pattern;
3471 rtx reg = SET_DEST (pattern);
3472 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3473 int offset = 0;
3475 if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3476 && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3478 offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3479 GET_MODE (SUBREG_REG (reg)),
3480 SUBREG_BYTE (reg),
3481 GET_MODE (reg));
3482 reg = SUBREG_REG (reg);
3486 if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3488 reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3489 SET_DEST (pattern) = reg;
3493 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3494 only such SETs that we expect to see are those left in because
3495 integrate can't handle sets of parts of a return value register.
3497 We don't use alter_subreg because we only want to eliminate subregs
3498 of hard registers. */
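/* An illustrative example with hypothetical hard registers: if hard
   regs 0 and 1 together hold a DImode return value, a leftover

	(set (subreg:SI (reg:DI 0) 4) (reg:SI 70))

   is turned into a set of the hard register the subreg actually names,

	(set (reg:SI 1) (reg:SI 70))

   with subreg_regno_offset converting the byte offset into a register
   number offset.  */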
3500 void
3501 purge_hard_subreg_sets (insn)
3502 rtx insn;
3504 for (; insn; insn = NEXT_INSN (insn))
3506 if (INSN_P (insn))
3508 rtx pattern = PATTERN (insn);
3509 switch (GET_CODE (pattern))
3511 case SET:
3512 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3513 purge_single_hard_subreg_set (pattern);
3514 break;
3515 case PARALLEL:
3517 int j;
3518 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3520 rtx inner_pattern = XVECEXP (pattern, 0, j);
3521 if (GET_CODE (inner_pattern) == SET
3522 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3523 purge_single_hard_subreg_set (inner_pattern);
3526 break;
3527 default:
3528 break;
3534 /* Pass through the INSNS of function FNDECL and convert virtual register
3535 references to hard register references. */
3537 void
3538 instantiate_virtual_regs (fndecl, insns)
3539 tree fndecl;
3540 rtx insns;
3542 rtx insn;
3543 unsigned int i;
3545 /* Compute the offsets to use for this function. */
3546 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3547 var_offset = STARTING_FRAME_OFFSET;
3548 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3549 out_arg_offset = STACK_POINTER_OFFSET;
3550 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3552 /* Scan all variables and parameters of this function. For each that is
3553 in memory, instantiate all virtual registers if the result is a valid
3554 address. If not, we do it later. That will handle most uses of virtual
3555 regs on many machines. */
3556 instantiate_decls (fndecl, 1);
3558 /* Initialize recognition, indicating that volatile is OK. */
3559 init_recog ();
3561 /* Scan through all the insns, instantiating every virtual register still
3562 present. */
3563 for (insn = insns; insn; insn = NEXT_INSN (insn))
3564 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3565 || GET_CODE (insn) == CALL_INSN)
3567 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3568 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3569 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3570 if (GET_CODE (insn) == CALL_INSN)
3571 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3572 NULL_RTX, 0);
3575 /* Instantiate the stack slots for the parm registers, for later use in
3576 addressof elimination. */
3577 for (i = 0; i < max_parm_reg; ++i)
3578 if (parm_reg_stack_loc[i])
3579 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3581 /* Now instantiate the remaining register equivalences for debugging info.
3582 These will not be valid addresses. */
3583 instantiate_decls (fndecl, 0);
3585 /* Indicate that, from now on, assign_stack_local should use
3586 frame_pointer_rtx. */
3587 virtuals_instantiated = 1;
3590 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3591 all virtual registers in their DECL_RTL's.
3593 If VALID_ONLY, do this only if the resulting address is still valid.
3594 Otherwise, always do it. */
3596 static void
3597 instantiate_decls (fndecl, valid_only)
3598 tree fndecl;
3599 int valid_only;
3601 tree decl;
3603 /* Process all parameters of the function. */
3604 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3606 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3607 HOST_WIDE_INT size_rtl;
3609 instantiate_decl (DECL_RTL (decl), size, valid_only);
3611 /* If the parameter was promoted, then the incoming RTL mode may be
3612 larger than the declared type size. We must use the larger of
3613 the two sizes. */
3614 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3615 size = MAX (size_rtl, size);
3616 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3619 /* Now process all variables defined in the function or its subblocks. */
3620 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3623 /* Subroutine of instantiate_decls: Process all decls in the given
3624 BLOCK node and all its subblocks. */
3626 static void
3627 instantiate_decls_1 (let, valid_only)
3628 tree let;
3629 int valid_only;
3631 tree t;
3633 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3634 if (DECL_RTL_SET_P (t))
3635 instantiate_decl (DECL_RTL (t),
3636 int_size_in_bytes (TREE_TYPE (t)),
3637 valid_only);
3639 /* Process all subblocks. */
3640 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3641 instantiate_decls_1 (t, valid_only);
3644 /* Subroutine of the preceding procedures: Given RTL representing a
3645 decl and the size of the object, do any instantiation required.
3647 If VALID_ONLY is non-zero, it means that the RTL should only be
3648 changed if the new address is valid. */
3650 static void
3651 instantiate_decl (x, size, valid_only)
3652 rtx x;
3653 HOST_WIDE_INT size;
3654 int valid_only;
3656 enum machine_mode mode;
3657 rtx addr;
3659 /* If this is not a MEM, no need to do anything. Similarly if the
3660 address is a constant or a register that is not a virtual register. */
3662 if (x == 0 || GET_CODE (x) != MEM)
3663 return;
3665 addr = XEXP (x, 0);
3666 if (CONSTANT_P (addr)
3667 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3668 || (GET_CODE (addr) == REG
3669 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3670 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3671 return;
3673 /* If we should only do this if the address is valid, copy the address.
3674 We need to do this so we can undo any changes that might make the
3675 address invalid. This copy is unfortunate, but probably can't be
3676 avoided. */
3678 if (valid_only)
3679 addr = copy_rtx (addr);
3681 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3683 if (valid_only && size >= 0)
3685 unsigned HOST_WIDE_INT decl_size = size;
3687 /* Now verify that the resulting address is valid for every integer or
3688 floating-point mode up to and including SIZE bytes long. We do this
3689 since the object might be accessed in any mode and frame addresses
3690 are shared. */
3692 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3693 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3694 mode = GET_MODE_WIDER_MODE (mode))
3695 if (! memory_address_p (mode, addr))
3696 return;
3698 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3699 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3700 mode = GET_MODE_WIDER_MODE (mode))
3701 if (! memory_address_p (mode, addr))
3702 return;
3705 /* Put back the address now that we have updated it and we either know
3706 it is valid or we don't care whether it is valid. */
3708 XEXP (x, 0) = addr;
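/* Worked example (added commentary): for SIZE == 8 on a typical 32-bit
   target, the two loops above require ADDR to be a valid address for
   QImode, HImode, SImode and DImode, and then for SFmode and DFmode.
   If any memory_address_p check fails, we return early and leave the
   old (possibly shared) address in place.  */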
3711 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3712 is a virtual register, return the equivalent hard register and set the
3713 offset indirectly through the pointer. Otherwise, return 0. */
3715 static rtx
3716 instantiate_new_reg (x, poffset)
3717 rtx x;
3718 HOST_WIDE_INT *poffset;
3720 rtx new;
3721 HOST_WIDE_INT offset;
3723 if (x == virtual_incoming_args_rtx)
3724 new = arg_pointer_rtx, offset = in_arg_offset;
3725 else if (x == virtual_stack_vars_rtx)
3726 new = frame_pointer_rtx, offset = var_offset;
3727 else if (x == virtual_stack_dynamic_rtx)
3728 new = stack_pointer_rtx, offset = dynamic_offset;
3729 else if (x == virtual_outgoing_args_rtx)
3730 new = stack_pointer_rtx, offset = out_arg_offset;
3731 else if (x == virtual_cfa_rtx)
3732 new = arg_pointer_rtx, offset = cfa_offset;
3733 else
3734 return 0;
3736 *poffset = offset;
3737 return new;
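/* Usage sketch (added commentary; this is the pattern the REG case of
   instantiate_virtual_regs_1 below actually uses):

	HOST_WIDE_INT offset;
	rtx new = instantiate_new_reg (x, &offset);

	if (new != 0)
	  x = plus_constant (new, offset);

   A virtual register always becomes a hard register plus a constant
   displacement.  */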
3740 /* Given a pointer to a piece of rtx and an optional pointer to the
3741 containing object, instantiate any virtual registers present in it.
3743 If EXTRA_INSNS, we always do the replacement and generate
3744 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3745 is not valid.
3747 Return 1 if we either had nothing to do or if we were able to do the
3748 needed replacement. Return 0 otherwise; we only return zero if
3749 EXTRA_INSNS is zero.
3751 We first try some simple transformations to avoid the creation of extra
3752 pseudos. */
3754 static int
3755 instantiate_virtual_regs_1 (loc, object, extra_insns)
3756 rtx *loc;
3757 rtx object;
3758 int extra_insns;
3760 rtx x;
3761 RTX_CODE code;
3762 rtx new = 0;
3763 HOST_WIDE_INT offset = 0;
3764 rtx temp;
3765 rtx seq;
3766 int i, j;
3767 const char *fmt;
3769 /* Re-start here to avoid recursion in common cases. */
3770 restart:
3772 x = *loc;
3773 if (x == 0)
3774 return 1;
3776 code = GET_CODE (x);
3778 /* Check for some special cases. */
3779 switch (code)
3781 case CONST_INT:
3782 case CONST_DOUBLE:
3783 case CONST_VECTOR:
3784 case CONST:
3785 case SYMBOL_REF:
3786 case CODE_LABEL:
3787 case PC:
3788 case CC0:
3789 case ASM_INPUT:
3790 case ADDR_VEC:
3791 case ADDR_DIFF_VEC:
3792 case RETURN:
3793 return 1;
3795 case SET:
3796 /* We are allowed to set the virtual registers. This means that
3797 the actual register should receive the source minus the
3798 appropriate offset. This is used, for example, in the handling
3799 of non-local gotos. */
3800 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3802 rtx src = SET_SRC (x);
3804 /* We are setting the register, not using it, so the relevant
3805 offset is the negative of the offset to use were we using
3806 the register. */
3807 offset = - offset;
3808 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3810 /* The only valid sources here are PLUS or REG. Just do
3811 the simplest possible thing to handle them. */
3812 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3813 abort ();
3815 start_sequence ();
3816 if (GET_CODE (src) != REG)
3817 temp = force_operand (src, NULL_RTX);
3818 else
3819 temp = src;
3820 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3821 seq = get_insns ();
3822 end_sequence ();
3824 emit_insn_before (seq, object);
3825 SET_DEST (x) = new;
3827 if (! validate_change (object, &SET_SRC (x), temp, 0)
3828 || ! extra_insns)
3829 abort ();
3831 return 1;
3834 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3835 loc = &SET_SRC (x);
3836 goto restart;
3838 case PLUS:
3839 /* Handle special case of virtual register plus constant. */
3840 if (CONSTANT_P (XEXP (x, 1)))
3842 rtx old, new_offset;
3844 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3845 if (GET_CODE (XEXP (x, 0)) == PLUS)
3847 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3849 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3850 extra_insns);
3851 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3853 else
3855 loc = &XEXP (x, 0);
3856 goto restart;
3860 #ifdef POINTERS_EXTEND_UNSIGNED
3861 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3862 we can commute the PLUS and SUBREG because pointers into the
3863 frame are well-behaved. */
3864 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3865 && GET_CODE (XEXP (x, 1)) == CONST_INT
3866 && 0 != (new
3867 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3868 &offset))
3869 && validate_change (object, loc,
3870 plus_constant (gen_lowpart (ptr_mode,
3871 new),
3872 offset
3873 + INTVAL (XEXP (x, 1))),
3874 0))
3875 return 1;
3876 #endif
3877 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3879 /* We know the second operand is a constant. Unless the
3880 first operand is a REG (which has already been checked),
3881 it needs to be checked. */
3882 if (GET_CODE (XEXP (x, 0)) != REG)
3884 loc = &XEXP (x, 0);
3885 goto restart;
3887 return 1;
3890 new_offset = plus_constant (XEXP (x, 1), offset);
3892 /* If the new constant is zero, try to replace the sum with just
3893 the register. */
3894 if (new_offset == const0_rtx
3895 && validate_change (object, loc, new, 0))
3896 return 1;
3898 /* Next try to replace the register and new offset.
3899 There are two changes to validate here, and we can't assume that,
3900 when the old offset equals the new one, just changing the register
3901 will yield a valid insn. In the interests of a little efficiency,
3902 however, we only call validate_change once (we don't queue up the
3903 changes and then call apply_change_group). */
3905 old = XEXP (x, 0);
3906 if (offset == 0
3907 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3908 : (XEXP (x, 0) = new,
3909 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3911 if (! extra_insns)
3913 XEXP (x, 0) = old;
3914 return 0;
3917 /* Otherwise copy the new constant into a register and replace
3918 the constant with that register. */
3919 temp = gen_reg_rtx (Pmode);
3920 XEXP (x, 0) = new;
3921 if (validate_change (object, &XEXP (x, 1), temp, 0))
3922 emit_insn_before (gen_move_insn (temp, new_offset), object);
3923 else
3925 /* If that didn't work, replace this expression with a
3926 register containing the sum. */
3928 XEXP (x, 0) = old;
3929 new = gen_rtx_PLUS (Pmode, new, new_offset);
3931 start_sequence ();
3932 temp = force_operand (new, NULL_RTX);
3933 seq = get_insns ();
3934 end_sequence ();
3936 emit_insn_before (seq, object);
3937 if (! validate_change (object, loc, temp, 0)
3938 && ! validate_replace_rtx (x, temp, object))
3939 abort ();
3943 return 1;
3946 /* Fall through to generic two-operand expression case. */
3947 case EXPR_LIST:
3948 case CALL:
3949 case COMPARE:
3950 case MINUS:
3951 case MULT:
3952 case DIV: case UDIV:
3953 case MOD: case UMOD:
3954 case AND: case IOR: case XOR:
3955 case ROTATERT: case ROTATE:
3956 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3957 case NE: case EQ:
3958 case GE: case GT: case GEU: case GTU:
3959 case LE: case LT: case LEU: case LTU:
3960 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3961 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3962 loc = &XEXP (x, 0);
3963 goto restart;
3965 case MEM:
3966 /* Most cases of MEM that convert to valid addresses have already been
3967 handled by our scan of decls. The only special handling we
3968 need here is to make a copy of the rtx to ensure it isn't being
3969 shared if we have to change it to a pseudo.
3971 If the rtx is a simple reference to an address via a virtual register,
3972 it can potentially be shared. In such cases, first try to make it
3973 a valid address, which can also be shared. Otherwise, copy it and
3974 proceed normally.
3976 First check for common cases that need no processing. These are
3977 usually due to instantiation already being done on a previous instance
3978 of a shared rtx. */
3980 temp = XEXP (x, 0);
3981 if (CONSTANT_ADDRESS_P (temp)
3982 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3983 || temp == arg_pointer_rtx
3984 #endif
3985 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3986 || temp == hard_frame_pointer_rtx
3987 #endif
3988 || temp == frame_pointer_rtx)
3989 return 1;
3991 if (GET_CODE (temp) == PLUS
3992 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3993 && (XEXP (temp, 0) == frame_pointer_rtx
3994 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3995 || XEXP (temp, 0) == hard_frame_pointer_rtx
3996 #endif
3997 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3998 || XEXP (temp, 0) == arg_pointer_rtx
3999 #endif
4000 ))
4001 return 1;
4003 if (temp == virtual_stack_vars_rtx
4004 || temp == virtual_incoming_args_rtx
4005 || (GET_CODE (temp) == PLUS
4006 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4007 && (XEXP (temp, 0) == virtual_stack_vars_rtx
4008 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4010 /* This MEM may be shared. If the substitution can be done without
4011 the need to generate new pseudos, we want to do it in place
4012 so all copies of the shared rtx benefit. The call below will
4013 only make substitutions if the resulting address is still
4014 valid.
4016 Note that we cannot pass X as the object in the recursive call
4017 since the insn being processed may not allow all valid
4018 addresses. However, if we were not passed an object, we can
4019 only modify X without copying it if X will have a valid
4020 address.
4022 ??? Also note that this can still lose if OBJECT is an insn that
4023 has fewer restrictions on an address than some other insn.
4024 In that case, we will modify the shared address. This case
4025 doesn't seem very likely, though. One case where this could
4026 happen is in the case of a USE or CLOBBER reference, but we
4027 take care of that below. */
4029 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4030 object ? object : x, 0))
4031 return 1;
4033 /* Otherwise make a copy and process that copy. We copy the entire
4034 RTL expression since it might be a PLUS which could also be
4035 shared. */
4036 *loc = x = copy_rtx (x);
4039 /* Fall through to generic unary operation case. */
4040 case PREFETCH:
4041 case SUBREG:
4042 case STRICT_LOW_PART:
4043 case NEG: case NOT:
4044 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
4045 case SIGN_EXTEND: case ZERO_EXTEND:
4046 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4047 case FLOAT: case FIX:
4048 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4049 case ABS:
4050 case SQRT:
4051 case FFS:
4052 /* These cases either have just one operand or we know that we need not
4053 check the rest of the operands. */
4054 loc = &XEXP (x, 0);
4055 goto restart;
4057 case USE:
4058 case CLOBBER:
4059 /* If the operand is a MEM, see if the change yields a valid MEM. If not,
4060 go ahead and make the invalid change anyway, but apply it to a copy. For a REG,
4061 just make the recursive call, since there's no chance of a problem. */
4063 if ((GET_CODE (XEXP (x, 0)) == MEM
4064 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4065 0))
4066 || (GET_CODE (XEXP (x, 0)) == REG
4067 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4068 return 1;
4070 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4071 loc = &XEXP (x, 0);
4072 goto restart;
4074 case REG:
4075 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4076 in front of this insn and substitute the temporary. */
4077 if ((new = instantiate_new_reg (x, &offset)) != 0)
4079 temp = plus_constant (new, offset);
4080 if (!validate_change (object, loc, temp, 0))
4082 if (! extra_insns)
4083 return 0;
4085 start_sequence ();
4086 temp = force_operand (temp, NULL_RTX);
4087 seq = get_insns ();
4088 end_sequence ();
4090 emit_insn_before (seq, object);
4091 if (! validate_change (object, loc, temp, 0)
4092 && ! validate_replace_rtx (x, temp, object))
4093 abort ();
4097 return 1;
4099 case ADDRESSOF:
4100 if (GET_CODE (XEXP (x, 0)) == REG)
4101 return 1;
4103 else if (GET_CODE (XEXP (x, 0)) == MEM)
4105 /* If we have an (addressof (mem ..)), do any instantiation inside
4106 since we know we'll be making the inside valid when we finally
4107 remove the ADDRESSOF. */
4108 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4109 return 1;
4111 break;
4113 default:
4114 break;
4117 /* Scan all subexpressions. */
4118 fmt = GET_RTX_FORMAT (code);
4119 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4120 if (*fmt == 'e')
4122 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4123 return 0;
4125 else if (*fmt == 'E')
4126 for (j = 0; j < XVECLEN (x, i); j++)
4127 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4128 extra_insns))
4129 return 0;
4131 return 1;
4134 /* Optimization: assuming this function does not receive nonlocal gotos,
4135 delete the handlers for such, as well as the insns to establish
4136 and disestablish them. */
4138 static void
4139 delete_handlers ()
4141 rtx insn;
4142 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4144 /* Delete the handler by turning off the flag that would
4145 prevent jump_optimize from deleting it.
4146 Also permit deletion of the nonlocal labels themselves
4147 if nothing local refers to them. */
4148 if (GET_CODE (insn) == CODE_LABEL)
4150 tree t, last_t;
4152 LABEL_PRESERVE_P (insn) = 0;
4154 /* Remove it from the nonlocal_label list, to avoid confusing
4155 flow. */
4156 for (t = nonlocal_labels, last_t = 0; t;
4157 last_t = t, t = TREE_CHAIN (t))
4158 if (DECL_RTL (TREE_VALUE (t)) == insn)
4159 break;
4160 if (t)
4162 if (! last_t)
4163 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4164 else
4165 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4168 if (GET_CODE (insn) == INSN)
4170 int can_delete = 0;
4171 rtx t;
4172 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4173 if (reg_mentioned_p (t, PATTERN (insn)))
4175 can_delete = 1;
4176 break;
4178 if (can_delete
4179 || (nonlocal_goto_stack_level != 0
4180 && reg_mentioned_p (nonlocal_goto_stack_level,
4181 PATTERN (insn))))
4182 delete_related_insns (insn);
4187 int
4188 max_parm_reg_num ()
4190 return max_parm_reg;
4193 /* Return the first insn following those generated by `assign_parms'. */
4195 rtx
4196 get_first_nonparm_insn ()
4198 if (last_parm_insn)
4199 return NEXT_INSN (last_parm_insn);
4200 return get_insns ();
4203 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4204 Crash if there is none. */
4206 rtx
4207 get_first_block_beg ()
4209 rtx searcher;
4210 rtx insn = get_first_nonparm_insn ();
4212 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4213 if (GET_CODE (searcher) == NOTE
4214 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4215 return searcher;
4217 abort (); /* Invalid call to this function. (See comments above.) */
4218 return NULL_RTX;
4221 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4222 This means a type for which function calls must pass an address to the
4223 function or get an address back from the function.
4224 EXP may be a type node or an expression (whose type is tested). */
4226 int
4227 aggregate_value_p (exp)
4228 tree exp;
4230 int i, regno, nregs;
4231 rtx reg;
4233 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4235 if (TREE_CODE (type) == VOID_TYPE)
4236 return 0;
4237 if (RETURN_IN_MEMORY (type))
4238 return 1;
4239 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4240 and thus can't be returned in registers. */
4241 if (TREE_ADDRESSABLE (type))
4242 return 1;
4243 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4244 return 1;
4245 /* Make sure we have suitable call-clobbered regs to return
4246 the value in; if not, we must return it in memory. */
4247 reg = hard_function_value (type, 0, 0);
4249 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4250 it is OK. */
4251 if (GET_CODE (reg) != REG)
4252 return 0;
4254 regno = REGNO (reg);
4255 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4256 for (i = 0; i < nregs; i++)
4257 if (! call_used_regs[regno + i])
4258 return 1;
4259 return 0;
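/* Examples (added commentary): on most targets

	struct big { char buf[64]; };

   is returned in memory, so aggregate_value_p is 1 for it, while an
   `int' return value lives in a register and yields 0.  The exact split
   is target-dependent, via RETURN_IN_MEMORY and -fpcc-struct-return.  */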
4262 /* Assign RTL expressions to the function's parameters.
4263 This may involve copying them into registers and using
4264 those registers as the RTL for them. */
4266 void
4267 assign_parms (fndecl)
4268 tree fndecl;
4270 tree parm;
4271 rtx entry_parm = 0;
4272 rtx stack_parm = 0;
4273 CUMULATIVE_ARGS args_so_far;
4274 enum machine_mode promoted_mode, passed_mode;
4275 enum machine_mode nominal_mode, promoted_nominal_mode;
4276 int unsignedp;
4277 /* Total space needed so far for args on the stack,
4278 given as a constant and a tree-expression. */
4279 struct args_size stack_args_size;
4280 tree fntype = TREE_TYPE (fndecl);
4281 tree fnargs = DECL_ARGUMENTS (fndecl);
4282 /* This is used for the arg pointer when referring to stack args. */
4283 rtx internal_arg_pointer;
4284 /* This is a dummy PARM_DECL that we used for the function result if
4285 the function returns a structure. */
4286 tree function_result_decl = 0;
4287 #ifdef SETUP_INCOMING_VARARGS
4288 int varargs_setup = 0;
4289 #endif
4290 rtx conversion_insns = 0;
4291 struct args_size alignment_pad;
4293 /* Nonzero if function takes extra anonymous args.
4294 This means the last named arg must be on the stack
4295 right before the anonymous ones. */
4296 int stdarg
4297 = (TYPE_ARG_TYPES (fntype) != 0
4298 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4299 != void_type_node));
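/* Example (added commentary): for `int f (int a, ...)' the front end
   builds TYPE_ARG_TYPES as (int) with no trailing void_type_node, so
   STDARG is 1; for the prototype `int g (int a)' the list is
   (int, void) and STDARG is 0.  */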
4301 current_function_stdarg = stdarg;
4303 /* If the reg that the virtual arg pointer will be translated into is
4304 not a fixed reg or is the stack pointer, make a copy of the virtual
4305 arg pointer, and address parms via the copy. The frame pointer is
4306 considered fixed even though it is not marked as such.
4308 The second time through, simply use ap to avoid generating rtx. */
4310 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4311 || ! (fixed_regs[ARG_POINTER_REGNUM]
4312 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4313 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4314 else
4315 internal_arg_pointer = virtual_incoming_args_rtx;
4316 current_function_internal_arg_pointer = internal_arg_pointer;
4318 stack_args_size.constant = 0;
4319 stack_args_size.var = 0;
4321 /* If struct value address is treated as the first argument, make it so. */
4322 if (aggregate_value_p (DECL_RESULT (fndecl))
4323 && ! current_function_returns_pcc_struct
4324 && struct_value_incoming_rtx == 0)
4326 tree type = build_pointer_type (TREE_TYPE (fntype));
4328 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4330 DECL_ARG_TYPE (function_result_decl) = type;
4331 TREE_CHAIN (function_result_decl) = fnargs;
4332 fnargs = function_result_decl;
4335 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4336 parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4338 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4339 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4340 #else
4341 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4342 #endif
4344 /* We haven't yet found an argument that we must push and pretend the
4345 caller did. */
4346 current_function_pretend_args_size = 0;
4348 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4350 struct args_size stack_offset;
4351 struct args_size arg_size;
4352 int passed_pointer = 0;
4353 int did_conversion = 0;
4354 tree passed_type = DECL_ARG_TYPE (parm);
4355 tree nominal_type = TREE_TYPE (parm);
4356 int pretend_named;
4357 int last_named = 0, named_arg;
4359 /* Set LAST_NAMED if this is the last named arg before the
4360 anonymous args. */
4361 if (stdarg)
4363 tree tem;
4365 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4366 if (DECL_NAME (tem))
4367 break;
4369 if (tem == 0)
4370 last_named = 1;
4372 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4373 most machines, if this is a varargs/stdarg function, then we treat
4374 the last named arg as if it were anonymous too. */
4375 named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4377 if (TREE_TYPE (parm) == error_mark_node
4378 /* This can happen after weird syntax errors
4379 or if an enum type is defined among the parms. */
4380 || TREE_CODE (parm) != PARM_DECL
4381 || passed_type == NULL)
4383 SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4384 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4385 TREE_USED (parm) = 1;
4386 continue;
4389 /* Find mode of arg as it is passed, and mode of arg
4390 as it should be during execution of this function. */
4391 passed_mode = TYPE_MODE (passed_type);
4392 nominal_mode = TYPE_MODE (nominal_type);
4394 /* If the parm's mode is VOID, its value doesn't matter,
4395 so avoid the usual things like emit_move_insn that could crash.
4396 if (nominal_mode == VOIDmode)
4398 SET_DECL_RTL (parm, const0_rtx);
4399 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4400 continue;
4403 /* If the parm is to be passed as a transparent union, use the
4404 type of the first field for the tests below. We have already
4405 verified that the modes are the same. */
4406 if (DECL_TRANSPARENT_UNION (parm)
4407 || (TREE_CODE (passed_type) == UNION_TYPE
4408 && TYPE_TRANSPARENT_UNION (passed_type)))
4409 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4411 /* See if this arg was passed by invisible reference. It is if
4412 it is an object whose size depends on the contents of the
4413 object itself or if the machine requires these objects be passed
4414 that way. */
4416 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4417 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4418 || TREE_ADDRESSABLE (passed_type)
4419 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4420 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4421 passed_type, named_arg)
4422 #endif
4423 )
4425 passed_type = nominal_type = build_pointer_type (passed_type);
4426 passed_pointer = 1;
4427 passed_mode = nominal_mode = Pmode;
4430 promoted_mode = passed_mode;
4432 #ifdef PROMOTE_FUNCTION_ARGS
4433 /* Compute the mode to which the arg is actually extended. */
4434 unsignedp = TREE_UNSIGNED (passed_type);
4435 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4436 #endif
4438 /* Let machine desc say which reg (if any) the parm arrives in.
4439 0 means it arrives on the stack. */
4440 #ifdef FUNCTION_INCOMING_ARG
4441 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4442 passed_type, named_arg);
4443 #else
4444 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4445 passed_type, named_arg);
4446 #endif
4448 if (entry_parm == 0)
4449 promoted_mode = passed_mode;
4451 #ifdef SETUP_INCOMING_VARARGS
4452 /* If this is the last named parameter, do any required setup for
4453 varargs or stdargs. We need to know about the case of this being an
4454 addressable type, in which case we skip the registers it
4455 would have arrived in.
4457 For stdargs, LAST_NAMED will be set for two parameters, the one that
4458 is actually the last named, and the dummy parameter. We only
4459 want to do this action once.
4461 Also, indicate when RTL generation is to be suppressed. */
4462 if (last_named && !varargs_setup)
4464 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4465 current_function_pretend_args_size, 0);
4466 varargs_setup = 1;
4468 #endif
4470 /* Determine parm's home in the stack,
4471 in case it arrives in the stack or we should pretend it did.
4473 Compute the stack position and rtx where the argument arrives
4474 and its size.
4476 There is one complexity here: If this was a parameter that would
4477 have been passed in registers, but wasn't only because it is
4478 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4479 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4480 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4481 0 as it was the previous time. */
4483 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4484 locate_and_pad_parm (promoted_mode, passed_type,
4485 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4486 1,
4487 #else
4488 #ifdef FUNCTION_INCOMING_ARG
4489 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4490 passed_type,
4491 pretend_named) != 0,
4492 #else
4493 FUNCTION_ARG (args_so_far, promoted_mode,
4494 passed_type,
4495 pretend_named) != 0,
4496 #endif
4497 #endif
4498 fndecl, &stack_args_size, &stack_offset, &arg_size,
4499 &alignment_pad);
4502 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4504 if (offset_rtx == const0_rtx)
4505 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4506 else
4507 stack_parm = gen_rtx_MEM (promoted_mode,
4508 gen_rtx_PLUS (Pmode,
4509 internal_arg_pointer,
4510 offset_rtx));
4512 set_mem_attributes (stack_parm, parm, 1);
4515 /* If this parameter was passed both in registers and in the stack,
4516 use the copy on the stack. */
4517 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4518 entry_parm = 0;
4520 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4521 /* If this parm was passed part in regs and part in memory,
4522 pretend it arrived entirely in memory
4523 by pushing the register-part onto the stack.
4525 In the special case of a DImode or DFmode that is split,
4526 we could put it together in a pseudoreg directly,
4527 but for now that's not worth bothering with. */
4529 if (entry_parm)
4531 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4532 passed_type, named_arg);
4534 if (nregs > 0)
4536 current_function_pretend_args_size
4537 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4538 / (PARM_BOUNDARY / BITS_PER_UNIT)
4539 * (PARM_BOUNDARY / BITS_PER_UNIT));
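/* Worked example (added commentary; the numbers are made up): with
   nregs == 3, UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, the register
   part occupies 12 bytes, which is rounded up to the 8-byte parm
   boundary: (12 + 8 - 1) / 8 * 8 == 16 bytes of pretend args.  */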
4541 /* Handle calls that pass values in multiple non-contiguous
4542 locations. The Irix 6 ABI has examples of this. */
4543 if (GET_CODE (entry_parm) == PARALLEL)
4544 emit_group_store (validize_mem (stack_parm), entry_parm,
4545 int_size_in_bytes (TREE_TYPE (parm)));
4547 else
4548 move_block_from_reg (REGNO (entry_parm),
4549 validize_mem (stack_parm), nregs,
4550 int_size_in_bytes (TREE_TYPE (parm)));
4552 entry_parm = stack_parm;
4555 #endif
4557 /* If we didn't decide this parm came in a register,
4558 by default it came on the stack. */
4559 if (entry_parm == 0)
4560 entry_parm = stack_parm;
4562 /* Record permanently how this parm was passed. */
4563 DECL_INCOMING_RTL (parm) = entry_parm;
4565 /* If there is actually space on the stack for this parm,
4566 count it in stack_args_size; otherwise set stack_parm to 0
4567 to indicate there is no preallocated stack slot for the parm. */
4569 if (entry_parm == stack_parm
4570 || (GET_CODE (entry_parm) == PARALLEL
4571 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4572 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4573 /* On some machines, even if a parm value arrives in a register
4574 there is still an (uninitialized) stack slot allocated for it.
4576 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4577 whether this parameter already has a stack slot allocated,
4578 because an arg block exists only if current_function_args_size
4579 is larger than some threshold, and we haven't calculated that
4580 yet. So, for now, we just assume that stack slots never exist
4581 in this case. */
4582 || REG_PARM_STACK_SPACE (fndecl) > 0
4583 #endif
4584 )
4586 stack_args_size.constant += arg_size.constant;
4587 if (arg_size.var)
4588 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4590 else
4591 /* No stack slot was pushed for this parm. */
4592 stack_parm = 0;
4594 /* Update info on where next arg arrives in registers. */
4596 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4597 passed_type, named_arg);
4599 /* If we can't trust the parm stack slot to be aligned enough
4600 for its ultimate type, don't use that slot after entry.
4601 We'll make another stack slot, if we need one. */
4603 unsigned int thisparm_boundary
4604 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4606 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4607 stack_parm = 0;
4610 /* If parm was passed in memory, and we need to convert it on entry,
4611 don't store it back in that same slot. */
4612 if (entry_parm != 0
4613 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4614 stack_parm = 0;
4616 /* When an argument is passed in multiple locations, we can't
4617 make use of this information, but we can save some copying if
4618 the whole argument is passed in a single register. */
4619 if (GET_CODE (entry_parm) == PARALLEL
4620 && nominal_mode != BLKmode && passed_mode != BLKmode)
4622 int i, len = XVECLEN (entry_parm, 0);
4624 for (i = 0; i < len; i++)
4625 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4626 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4627 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4628 == passed_mode)
4629 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4631 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4632 DECL_INCOMING_RTL (parm) = entry_parm;
4633 break;
4637 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4638 in the mode in which it arrives.
4639 STACK_PARM is an RTX for a stack slot where the parameter can live
4640 during the function (in case we want to put it there).
4641 STACK_PARM is 0 if no stack slot was pushed for it.
4643 Now output code if necessary to convert ENTRY_PARM to
4644 the type in which this function declares it,
4645 and store that result in an appropriate place,
4646 which may be a pseudo reg, may be STACK_PARM,
4647 or may be a local stack slot if STACK_PARM is 0.
4649 Set DECL_RTL to that place. */
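/* Example (added commentary): with PROMOTE_FUNCTION_ARGS, a `short'
   parm arrives extended in an SImode register, so PROMOTED_MODE is
   SImode while NOMINAL_MODE is HImode.  If such a parm must live on the
   stack, the code below queues a convert_to_mode back to HImode on
   CONVERSION_INSNS before storing the value into its stack home.  */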
4651 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4653 /* If a BLKmode arrives in registers, copy it to a stack slot.
4654 Handle calls that pass values in multiple non-contiguous
4655 locations. The Irix 6 ABI has examples of this. */
4656 if (GET_CODE (entry_parm) == REG
4657 || GET_CODE (entry_parm) == PARALLEL)
4659 int size_stored
4660 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4661 UNITS_PER_WORD);
4663 /* Note that we will be storing an integral number of words.
4664 So we have to be careful to ensure that we allocate an
4665 integral number of words. We do this below in the
4666 assign_stack_local if space was not allocated in the argument
4667 list. If it was, this will not work if PARM_BOUNDARY is not
4668 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4669 if it becomes a problem. */
4671 if (stack_parm == 0)
4673 stack_parm
4674 = assign_stack_local (GET_MODE (entry_parm),
4675 size_stored, 0);
4676 set_mem_attributes (stack_parm, parm, 1);
4679 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4680 abort ();
4682 /* Handle calls that pass values in multiple non-contiguous
4683 locations. The Irix 6 ABI has examples of this. */
4684 if (GET_CODE (entry_parm) == PARALLEL)
4685 emit_group_store (validize_mem (stack_parm), entry_parm,
4686 int_size_in_bytes (TREE_TYPE (parm)));
4687 else
4688 move_block_from_reg (REGNO (entry_parm),
4689 validize_mem (stack_parm),
4690 size_stored / UNITS_PER_WORD,
4691 int_size_in_bytes (TREE_TYPE (parm)));
4693 SET_DECL_RTL (parm, stack_parm);
4695 else if (! ((! optimize
4696 && ! DECL_REGISTER (parm))
4697 || TREE_SIDE_EFFECTS (parm)
4698 /* If -ffloat-store specified, don't put explicit
4699 float variables into registers. */
4700 || (flag_float_store
4701 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4702 /* Always assign pseudo to structure return or item passed
4703 by invisible reference. */
4704 || passed_pointer || parm == function_result_decl)
4706 /* Store the parm in a pseudoregister during the function, but we
4707 may need to do it in a wider mode. */
4709 rtx parmreg;
4710 unsigned int regno, regnoi = 0, regnor = 0;
4712 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4714 promoted_nominal_mode
4715 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4717 parmreg = gen_reg_rtx (promoted_nominal_mode);
4718 mark_user_reg (parmreg);
4720 /* If this was an item that we received a pointer to, set DECL_RTL
4721 appropriately. */
4722 if (passed_pointer)
4724 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4725 parmreg);
4726 set_mem_attributes (x, parm, 1);
4727 SET_DECL_RTL (parm, x);
4729 else
4731 SET_DECL_RTL (parm, parmreg);
4732 maybe_set_unchanging (DECL_RTL (parm), parm);
4735 /* Copy the value into the register. */
4736 if (nominal_mode != passed_mode
4737 || promoted_nominal_mode != promoted_mode)
4739 int save_tree_used;
4740 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4741 mode, by the caller. We now have to convert it to
4742 NOMINAL_MODE, if different. However, PARMREG may be in
4743 a different mode than NOMINAL_MODE if it is being stored
4744 promoted.
4746 If ENTRY_PARM is a hard register, it might be in a register
4747 not valid for operating in its mode (e.g., an odd-numbered
4748 register for a DFmode). In that case, moves are the only
4749 thing valid, so we can't do a convert from there. This
4750 occurs when the calling sequence allows such misaligned
4751 usages.
4753 In addition, the conversion may involve a call, which could
4754 clobber parameters which haven't been copied to pseudo
4755 registers yet. Therefore, we must first copy the parm to
4756 a pseudo reg here, and save the conversion until after all
4757 parameters have been moved. */
4759 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4761 emit_move_insn (tempreg, validize_mem (entry_parm));
4763 push_to_sequence (conversion_insns);
4764 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4766 if (GET_CODE (tempreg) == SUBREG
4767 && GET_MODE (tempreg) == nominal_mode
4768 && GET_CODE (SUBREG_REG (tempreg)) == REG
4769 && nominal_mode == passed_mode
4770 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4771 && GET_MODE_SIZE (GET_MODE (tempreg))
4772 < GET_MODE_SIZE (GET_MODE (entry_parm)))
4774 /* The argument is already sign/zero extended, so note it
4775 into the subreg. */
4776 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4777 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4780 /* TREE_USED gets set erroneously during expand_assignment. */
4781 save_tree_used = TREE_USED (parm);
4782 expand_assignment (parm,
4783 make_tree (nominal_type, tempreg), 0, 0);
4784 TREE_USED (parm) = save_tree_used;
4785 conversion_insns = get_insns ();
4786 did_conversion = 1;
4787 end_sequence ();
4789 else
4790 emit_move_insn (parmreg, validize_mem (entry_parm));
4792 /* If we were passed a pointer but the actual value
4793 can safely live in a register, put it in one. */
4794 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4795 /* If a by-reference argument was promoted, demote it. */
4796 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4797 || ! ((! optimize
4798 && ! DECL_REGISTER (parm))
4799 || TREE_SIDE_EFFECTS (parm)
4800 /* If -ffloat-store specified, don't put explicit
4801 float variables into registers. */
4802 || (flag_float_store
4803 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4805 /* We can't use nominal_mode, because it will have been set to
4806 Pmode above. We must use the actual mode of the parm. */
4807 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4808 mark_user_reg (parmreg);
4809 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4811 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4812 int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4813 push_to_sequence (conversion_insns);
4814 emit_move_insn (tempreg, DECL_RTL (parm));
4815 SET_DECL_RTL (parm,
4816 convert_to_mode (GET_MODE (parmreg),
4817 tempreg,
4818 unsigned_p));
4819 emit_move_insn (parmreg, DECL_RTL (parm));
4820 conversion_insns = get_insns();
4821 did_conversion = 1;
4822 end_sequence ();
4824 else
4825 emit_move_insn (parmreg, DECL_RTL (parm));
4826 SET_DECL_RTL (parm, parmreg);
4827 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4828 now the parm. */
4829 stack_parm = 0;
4831 #ifdef FUNCTION_ARG_CALLEE_COPIES
4832 /* If we are passed an arg by reference and it is our responsibility
4833 to make a copy, do it now.
4834 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4835 original argument, so we must recreate them in the call to
4836 FUNCTION_ARG_CALLEE_COPIES. */
4837 /* ??? Later, add code to skip the copy when the argument isn't
4838 modified. */
4840 else if (passed_pointer
4841 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4842 TYPE_MODE (DECL_ARG_TYPE (parm)),
4843 DECL_ARG_TYPE (parm),
4844 named_arg)
4845 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4847 rtx copy;
4848 tree type = DECL_ARG_TYPE (parm);
4850 /* This sequence may involve a library call perhaps clobbering
4851 registers that haven't been copied to pseudos yet. */
4853 push_to_sequence (conversion_insns);
4855 if (!COMPLETE_TYPE_P (type)
4856 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4857 /* This is a variable sized object. */
4858 copy = gen_rtx_MEM (BLKmode,
4859 allocate_dynamic_stack_space
4860 (expr_size (parm), NULL_RTX,
4861 TYPE_ALIGN (type)));
4862 else
4863 copy = assign_stack_temp (TYPE_MODE (type),
4864 int_size_in_bytes (type), 1);
4865 set_mem_attributes (copy, parm, 1);
4867 store_expr (parm, copy, 0);
4868 emit_move_insn (parmreg, XEXP (copy, 0));
4869 conversion_insns = get_insns ();
4870 did_conversion = 1;
4871 end_sequence ();
4873 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4875 /* In any case, record the parm's desired stack location
4876 in case we later discover it must live in the stack.
4878 If it is a COMPLEX value, store the stack location for both
4879 halves. */
4881 if (GET_CODE (parmreg) == CONCAT)
4882 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4883 else
4884 regno = REGNO (parmreg);
4886 if (regno >= max_parm_reg)
4888 rtx *new;
4889 int old_max_parm_reg = max_parm_reg;
4891 /* It's slow to expand this one register at a time,
4892 but it's also rare and we need max_parm_reg to be
4893 precisely correct. */
4894 max_parm_reg = regno + 1;
4895 new = (rtx *) ggc_realloc (parm_reg_stack_loc,
4896 max_parm_reg * sizeof (rtx));
4897 memset ((char *) (new + old_max_parm_reg), 0,
4898 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4899 parm_reg_stack_loc = new;
4902 if (GET_CODE (parmreg) == CONCAT)
4904 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4906 regnor = REGNO (gen_realpart (submode, parmreg));
4907 regnoi = REGNO (gen_imagpart (submode, parmreg));
4909 if (stack_parm != 0)
4911 parm_reg_stack_loc[regnor]
4912 = gen_realpart (submode, stack_parm);
4913 parm_reg_stack_loc[regnoi]
4914 = gen_imagpart (submode, stack_parm);
4916 else
4918 parm_reg_stack_loc[regnor] = 0;
4919 parm_reg_stack_loc[regnoi] = 0;
4922 else
4923 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4925 /* Mark the register as eliminable if we did no conversion
4926 and it was copied from memory at a fixed offset,
4927 and the arg pointer was not copied to a pseudo-reg.
4928 If the arg pointer is a pseudo reg or the offset formed
4929 an invalid address, such memory-equivalences
4930 as we make here would screw up life analysis for it. */
4931 if (nominal_mode == passed_mode
4932 && ! did_conversion
4933 && stack_parm != 0
4934 && GET_CODE (stack_parm) == MEM
4935 && stack_offset.var == 0
4936 && reg_mentioned_p (virtual_incoming_args_rtx,
4937 XEXP (stack_parm, 0)))
4939 rtx linsn = get_last_insn ();
4940 rtx sinsn, set;
4942 /* Mark complex types separately. */
4943 if (GET_CODE (parmreg) == CONCAT)
4944 /* Scan backwards for the set of the real and
4945 imaginary parts. */
4946 for (sinsn = linsn; sinsn != 0;
4947 sinsn = prev_nonnote_insn (sinsn))
4949 set = single_set (sinsn);
4950 if (set != 0
4951 && SET_DEST (set) == regno_reg_rtx [regnoi])
4952 REG_NOTES (sinsn)
4953 = gen_rtx_EXPR_LIST (REG_EQUIV,
4954 parm_reg_stack_loc[regnoi],
4955 REG_NOTES (sinsn));
4956 else if (set != 0
4957 && SET_DEST (set) == regno_reg_rtx [regnor])
4958 REG_NOTES (sinsn)
4959 = gen_rtx_EXPR_LIST (REG_EQUIV,
4960 parm_reg_stack_loc[regnor],
4961 REG_NOTES (sinsn));
4963 else if ((set = single_set (linsn)) != 0
4964 && SET_DEST (set) == parmreg)
4965 REG_NOTES (linsn)
4966 = gen_rtx_EXPR_LIST (REG_EQUIV,
4967 stack_parm, REG_NOTES (linsn));
4970 /* For pointer data type, suggest pointer register. */
4971 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4972 mark_reg_pointer (parmreg,
4973 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4975 /* If something wants our address, try to use ADDRESSOF. */
4976 if (TREE_ADDRESSABLE (parm))
4978 /* If we end up putting something into the stack,
4979 fixup_var_refs_insns will need to make a pass over
4980 all the instructions. It looks through the pending
4981 sequences -- but it can't see the ones in the
4982 CONVERSION_INSNS, if they're not on the sequence
4983 stack. So, we go back to that sequence, just so that
4984 the fixups will happen. */
4985 push_to_sequence (conversion_insns);
4986 put_var_into_stack (parm);
4987 conversion_insns = get_insns ();
4988 end_sequence ();
4991 else
4993 /* Value must be stored in the stack slot STACK_PARM
4994 during function execution. */
4996 if (promoted_mode != nominal_mode)
4998 /* Conversion is required. */
4999 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5001 emit_move_insn (tempreg, validize_mem (entry_parm));
5003 push_to_sequence (conversion_insns);
5004 entry_parm = convert_to_mode (nominal_mode, tempreg,
5005 TREE_UNSIGNED (TREE_TYPE (parm)));
5006 if (stack_parm)
5007 /* ??? This may need a big-endian conversion on sparc64. */
5008 stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5010 conversion_insns = get_insns ();
5011 did_conversion = 1;
5012 end_sequence ();
5015 if (entry_parm != stack_parm)
5017 if (stack_parm == 0)
5019 stack_parm
5020 = assign_stack_local (GET_MODE (entry_parm),
5021 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5022 set_mem_attributes (stack_parm, parm, 1);
5025 if (promoted_mode != nominal_mode)
5027 push_to_sequence (conversion_insns);
5028 emit_move_insn (validize_mem (stack_parm),
5029 validize_mem (entry_parm));
5030 conversion_insns = get_insns ();
5031 end_sequence ();
5033 else
5034 emit_move_insn (validize_mem (stack_parm),
5035 validize_mem (entry_parm));
5038 SET_DECL_RTL (parm, stack_parm);
5041 /* If this "parameter" was the place where we are receiving the
5042 function's incoming structure pointer, set up the result. */
5043 if (parm == function_result_decl)
5045 tree result = DECL_RESULT (fndecl);
5046 rtx addr = DECL_RTL (parm);
5047 rtx x;
5049 #ifdef POINTERS_EXTEND_UNSIGNED
5050 if (GET_MODE (addr) != Pmode)
5051 addr = convert_memory_address (Pmode, addr);
5052 #endif
5054 x = gen_rtx_MEM (DECL_MODE (result), addr);
5055 set_mem_attributes (x, result, 1);
5056 SET_DECL_RTL (result, x);
5059 if (GET_CODE (DECL_RTL (parm)) == REG)
5060 REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5061 else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5063 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5064 REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5069 /* Output all parameter conversion instructions (possibly including calls)
5070 now that all parameters have been copied out of hard registers. */
5071 emit_insn (conversion_insns);
5073 last_parm_insn = get_last_insn ();
5075 current_function_args_size = stack_args_size.constant;
5077 /* Adjust function incoming argument size for alignment and
5078 minimum length. */
5080 #ifdef REG_PARM_STACK_SPACE
5081 #ifndef MAYBE_REG_PARM_STACK_SPACE
5082 current_function_args_size = MAX (current_function_args_size,
5083 REG_PARM_STACK_SPACE (fndecl));
5084 #endif
5085 #endif
5087 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5089 current_function_args_size
5090 = ((current_function_args_size + STACK_BYTES - 1)
5091 / STACK_BYTES) * STACK_BYTES;
5093 #ifdef ARGS_GROW_DOWNWARD
5094 current_function_arg_offset_rtx
5095 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5096 : expand_expr (size_diffop (stack_args_size.var,
5097 size_int (-stack_args_size.constant)),
5098 NULL_RTX, VOIDmode, 0));
5099 #else
5100 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5101 #endif
5103 /* See how many bytes, if any, of its args a function should try to pop
5104 on return. */
5106 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5107 current_function_args_size);
5109 /* For a stdarg.h function, save info about
5110 regs and stack space used by the named args. */
5112 current_function_args_info = args_so_far;
5114 /* Set the rtx used for the function return value. Put this in its
5115 own variable so any optimizers that need this information don't have
5116 to include tree.h. Do this here so it gets done when an inlined
5117 function gets output. */
5119 current_function_return_rtx
5120 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5121 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5123 /* If the scalar return value was computed in a pseudo-reg, or was a named
5124 return value that got dumped to the stack, copy that to the hard
5125 return register. */
5126 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5128 tree decl_result = DECL_RESULT (fndecl);
5129 rtx decl_rtl = DECL_RTL (decl_result);
5131 if (REG_P (decl_rtl)
5132 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5133 : DECL_REGISTER (decl_result))
5135 rtx real_decl_rtl;
5137 #ifdef FUNCTION_OUTGOING_VALUE
5138 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5139 fndecl);
5140 #else
5141 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5142 fndecl);
5143 #endif
5144 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5145 /* The delay slot scheduler assumes that current_function_return_rtx
5146 holds the hard register containing the return value, not a
5147 temporary pseudo. */
5148 current_function_return_rtx = real_decl_rtl;
5153 /* Indicate whether REGNO is an incoming argument to the current function
5154 that was promoted to a wider mode. If so, return the RTX for the
5155 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5156 that REGNO is promoted from and whether the promotion was signed or
5157 unsigned. */
5159 #ifdef PROMOTE_FUNCTION_ARGS
5161 rtx
5162 promoted_input_arg (regno, pmode, punsignedp)
5163 unsigned int regno;
5164 enum machine_mode *pmode;
5165 int *punsignedp;
5167 tree arg;
5169 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5170 arg = TREE_CHAIN (arg))
5171 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5172 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5173 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5175 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5176 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5178 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5179 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5180 && mode != DECL_MODE (arg))
5182 *pmode = DECL_MODE (arg);
5183 *punsignedp = unsignedp;
5184 return DECL_INCOMING_RTL (arg);
5188 return 0;
5191 #endif
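/* Example (added commentary; the register number is made up): if a
   signed `short' argument arrives promoted in SImode hard register 3,
   promoted_input_arg (3, &mode, &unsignedp) returns that REG and sets
   *PMODE to HImode, with *PUNSIGNEDP reflecting how the target promoted
   the value.  */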
5193 /* Compute the size and offset from the start of the stacked arguments for a
5194 parm passed in mode PASSED_MODE and with type TYPE.
5196 INITIAL_OFFSET_PTR points to the current offset into the stacked
5197 arguments.
5199 The starting offset and size for this parm are returned in *OFFSET_PTR
5200 and *ARG_SIZE_PTR, respectively.
5202 IN_REGS is non-zero if the argument will be passed in registers. It will
5203 never be set if REG_PARM_STACK_SPACE is not defined.
5205 FNDECL is the function in which the argument was defined.
5207 There are two types of rounding that are done. The first, controlled by
5208 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5209 list to be aligned to the specific boundary (in bits). This rounding
5210 affects the initial and starting offsets, but not the argument size.
5212 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5213 optionally rounds the size of the parm to PARM_BOUNDARY. The
5214 initial offset is not affected by this rounding, while the size always
5215 is and the starting offset may be. */
5217 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
5218 initial_offset_ptr is positive because locate_and_pad_parm's
5219 callers pass in the total size of args so far as
5220 initial_offset_ptr. arg_size_ptr is always positive. */
5222 void
5223 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5224 initial_offset_ptr, offset_ptr, arg_size_ptr,
5225 alignment_pad)
5226 enum machine_mode passed_mode;
5227 tree type;
5228 int in_regs ATTRIBUTE_UNUSED;
5229 tree fndecl ATTRIBUTE_UNUSED;
5230 struct args_size *initial_offset_ptr;
5231 struct args_size *offset_ptr;
5232 struct args_size *arg_size_ptr;
5233 struct args_size *alignment_pad;
5236 tree sizetree
5237 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5238 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5239 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5241 #ifdef REG_PARM_STACK_SPACE
5242 /* If we have found a stack parm before we reach the end of the
5243 area reserved for registers, skip that area. */
5244 if (! in_regs)
5246 int reg_parm_stack_space = 0;
5248 #ifdef MAYBE_REG_PARM_STACK_SPACE
5249 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5250 #else
5251 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5252 #endif
5253 if (reg_parm_stack_space > 0)
5255 if (initial_offset_ptr->var)
5257 initial_offset_ptr->var
5258 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5259 ssize_int (reg_parm_stack_space));
5260 initial_offset_ptr->constant = 0;
5262 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5263 initial_offset_ptr->constant = reg_parm_stack_space;
5266 #endif /* REG_PARM_STACK_SPACE */
5268 arg_size_ptr->var = 0;
5269 arg_size_ptr->constant = 0;
5270 alignment_pad->var = 0;
5271 alignment_pad->constant = 0;
5273 #ifdef ARGS_GROW_DOWNWARD
5274 if (initial_offset_ptr->var)
5276 offset_ptr->constant = 0;
5277 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5278 initial_offset_ptr->var);
5280 else
5282 offset_ptr->constant = -initial_offset_ptr->constant;
5283 offset_ptr->var = 0;
5285 if (where_pad != none
5286 && (!host_integerp (sizetree, 1)
5287 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5288 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5289 SUB_PARM_SIZE (*offset_ptr, sizetree);
5290 if (where_pad != downward)
5291 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5292 if (initial_offset_ptr->var)
5293 arg_size_ptr->var = size_binop (MINUS_EXPR,
5294 size_binop (MINUS_EXPR,
5295 ssize_int (0),
5296 initial_offset_ptr->var),
5297 offset_ptr->var);
5299 else
5300 arg_size_ptr->constant = (-initial_offset_ptr->constant
5301 - offset_ptr->constant);
5303 #else /* !ARGS_GROW_DOWNWARD */
5304 if (!in_regs
5305 #ifdef REG_PARM_STACK_SPACE
5306 || REG_PARM_STACK_SPACE (fndecl) > 0
5307 #endif
5308 )
5309 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5310 *offset_ptr = *initial_offset_ptr;
5312 #ifdef PUSH_ROUNDING
5313 if (passed_mode != BLKmode)
5314 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5315 #endif
5317 /* Pad_below needs the pre-rounded size to know how much to pad below
5318 so this must be done before rounding up. */
5319 if (where_pad == downward
5320 /* However, BLKmode args passed in regs have their padding done elsewhere.
5321 The stack slot must be able to hold the entire register. */
5322 && !(in_regs && passed_mode == BLKmode))
5323 pad_below (offset_ptr, passed_mode, sizetree);
5325 if (where_pad != none
5326 && (!host_integerp (sizetree, 1)
5327 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5328 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5330 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5331 #endif /* ARGS_GROW_DOWNWARD */
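/* Worked example (added commentary; assumes args grow upward,
   BITS_PER_UNIT == 8 and PARM_BOUNDARY == STACK_BOUNDARY == 32): an
   SImode parm with a FUNCTION_ARG_BOUNDARY of 64 bits arriving when
   initial_offset_ptr->constant == 4 is padded up to offset 8, so
   *OFFSET_PTR becomes 8, *ARG_SIZE_PTR becomes 4, and *ALIGNMENT_PAD
   records the 4 bytes of padding.  */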
5334 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5335 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5337 static void
5338 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5339 struct args_size *offset_ptr;
5340 int boundary;
5341 struct args_size *alignment_pad;
5343 tree save_var = NULL_TREE;
5344 HOST_WIDE_INT save_constant = 0;
5346 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5348 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5350 save_var = offset_ptr->var;
5351 save_constant = offset_ptr->constant;
5354 alignment_pad->var = NULL_TREE;
5355 alignment_pad->constant = 0;
5357 if (boundary > BITS_PER_UNIT)
5359 if (offset_ptr->var)
5361 offset_ptr->var =
5362 #ifdef ARGS_GROW_DOWNWARD
5363 round_down
5364 #else
5365 round_up
5366 #endif
5367 (ARGS_SIZE_TREE (*offset_ptr),
5368 boundary / BITS_PER_UNIT);
5369 offset_ptr->constant = 0; /*?*/
5370 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5371 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5372 save_var);
5374 else
5376 offset_ptr->constant =
5377 #ifdef ARGS_GROW_DOWNWARD
5378 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5379 #else
5380 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5381 #endif
5382 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5383 alignment_pad->constant = offset_ptr->constant - save_constant;
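/* Worked example (added commentary): a constant offset of 14 with
   BOUNDARY == 64 gives boundary_in_bytes == 8; CEIL_ROUND (14, 8) == 16
   when args grow upward, and FLOOR_ROUND (14, 8) == 8 when they grow
   downward.  In the upward case the 2 bytes of padding are recorded in
   ALIGNMENT_PAD when BOUNDARY exceeds both PARM_BOUNDARY and
   STACK_BOUNDARY.  */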
5388 #ifndef ARGS_GROW_DOWNWARD
5389 static void
5390 pad_below (offset_ptr, passed_mode, sizetree)
5391 struct args_size *offset_ptr;
5392 enum machine_mode passed_mode;
5393 tree sizetree;
5395 if (passed_mode != BLKmode)
5397 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5398 offset_ptr->constant
5399 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5400 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5401 - GET_MODE_SIZE (passed_mode));
5403 else
5405 if (TREE_CODE (sizetree) != INTEGER_CST
5406 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5408 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5409 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5410 /* Add it in. */
5411 ADD_PARM_SIZE (*offset_ptr, s2);
5412 SUB_PARM_SIZE (*offset_ptr, sizetree);
5416 #endif
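/* Worked example (added commentary): for an HImode parm with
   PARM_BOUNDARY == 32, pad_below advances the offset by
   ((16 + 31) / 32 * 32 / 8) - 2 == 2 bytes, so the 2-byte value ends up
   in the upper half of its 4-byte slot.  */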
5418 /* Walk the tree of blocks describing the binding levels within a function
5419 and warn about uninitialized variables.
5420 This is done after calling flow_analysis and before global_alloc
5421 replaces the pseudo-regs with hard regs. */
5423 void
5424 uninitialized_vars_warning (block)
5425 tree block;
5427 tree decl, sub;
5428 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5430 if (warn_uninitialized
5431 && TREE_CODE (decl) == VAR_DECL
5432 /* These warnings are unreliable for aggregates
5433 because assigning the fields one by one can fail to convince
5434 flow.c that the entire aggregate was initialized.
5435 Unions are troublesome because members may be shorter. */
5436 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5437 && DECL_RTL (decl) != 0
5438 && GET_CODE (DECL_RTL (decl)) == REG
5439 /* Global optimizations can make it difficult to determine if a
5440 particular variable has been initialized. However, a VAR_DECL
5441 with a nonzero DECL_INITIAL had an initializer, so do not
5442 claim it is potentially uninitialized.
5444 We do not care about the actual value in DECL_INITIAL, so we do
5445 not worry that it may be a dangling pointer. */
5446 && DECL_INITIAL (decl) == NULL_TREE
5447 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5448 warning_with_decl (decl,
5449 "`%s' might be used uninitialized in this function");
5450 if (extra_warnings
5451 && TREE_CODE (decl) == VAR_DECL
5452 && DECL_RTL (decl) != 0
5453 && GET_CODE (DECL_RTL (decl)) == REG
5454 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5455 warning_with_decl (decl,
5456 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5458 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5459 uninitialized_vars_warning (sub);
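/* Example (added commentary) of source that triggers the first warning:

	int f (void) { int x; return x; }

   `x' is a non-aggregate VAR_DECL in a pseudo-reg with a null
   DECL_INITIAL, and flow analysis finds that pseudo uninitialized at
   the use, so we warn that `x' might be used uninitialized.  */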
5462 /* Do the appropriate part of uninitialized_vars_warning
5463 but for arguments instead of local variables. */
5465 void
5466 setjmp_args_warning ()
5468 tree decl;
5469 for (decl = DECL_ARGUMENTS (current_function_decl);
5470 decl; decl = TREE_CHAIN (decl))
5471 if (DECL_RTL (decl) != 0
5472 && GET_CODE (DECL_RTL (decl)) == REG
5473 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5474 warning_with_decl (decl,
5475 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5478 /* If this function calls setjmp, put all vars into the stack
5479 unless they were declared `register'. */
5481 void
5482 setjmp_protect (block)
5483 tree block;
5485 tree decl, sub;
5486 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5487 if ((TREE_CODE (decl) == VAR_DECL
5488 || TREE_CODE (decl) == PARM_DECL)
5489 && DECL_RTL (decl) != 0
5490 && (GET_CODE (DECL_RTL (decl)) == REG
5491 || (GET_CODE (DECL_RTL (decl)) == MEM
5492 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5493 /* If this variable came from an inline function, it must be
5494 that its life doesn't overlap the setjmp. If there was a
5495 setjmp in the function, it would already be in memory. We
5496 must exclude such variables because their DECL_RTL might be
5497 set to strange things such as virtual_stack_vars_rtx. */
5498 && ! DECL_FROM_INLINE (decl)
5499 && (
5500 #ifdef NON_SAVING_SETJMP
5501 /* If longjmp doesn't restore the registers,
5502 don't put anything in them. */
5503 NON_SAVING_SETJMP
5504 ||
5505 #endif
5506 ! DECL_REGISTER (decl)))
5507 put_var_into_stack (decl);
5508 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5509 setjmp_protect (sub);
5512 /* Like the previous function, but for args instead of local variables. */
5514 void
5515 setjmp_protect_args ()
5517 tree decl;
5518 for (decl = DECL_ARGUMENTS (current_function_decl);
5519 decl; decl = TREE_CHAIN (decl))
5520 if ((TREE_CODE (decl) == VAR_DECL
5521 || TREE_CODE (decl) == PARM_DECL)
5522 && DECL_RTL (decl) != 0
5523 && (GET_CODE (DECL_RTL (decl)) == REG
5524 || (GET_CODE (DECL_RTL (decl)) == MEM
5525 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5526 && (
5527 /* If longjmp doesn't restore the registers,
5528 don't put anything in them. */
5529 #ifdef NON_SAVING_SETJMP
5530 NON_SAVING_SETJMP
5531 ||
5532 #endif
5533 ! DECL_REGISTER (decl)))
5534 put_var_into_stack (decl);
5537 /* Return the context-pointer register corresponding to DECL,
5538 or 0 if it does not need one. */
5540 rtx
5541 lookup_static_chain (decl)
5542 tree decl;
5544 tree context = decl_function_context (decl);
5545 tree link;
5547 if (context == 0
5548 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5549 return 0;
5551 /* We treat inline_function_decl as an alias for the current function
5552 because that is the inline function whose vars, types, etc.
5553 are being merged into the current function.
5554 See expand_inline_function. */
5555 if (context == current_function_decl || context == inline_function_decl)
5556 return virtual_stack_vars_rtx;
5558 for (link = context_display; link; link = TREE_CHAIN (link))
5559 if (TREE_PURPOSE (link) == context)
5560 return RTL_EXPR_RTL (TREE_VALUE (link));
5562 abort ();
5565 /* Convert a stack slot address ADDR for variable VAR
5566 (from a containing function)
5567 into an address valid in this function (using a static chain). */
5569 rtx
5570 fix_lexical_addr (addr, var)
5571 rtx addr;
5572 tree var;
5574 rtx basereg;
5575 HOST_WIDE_INT displacement;
5576 tree context = decl_function_context (var);
5577 struct function *fp;
5578 rtx base = 0;
5580 /* If this is the present function, we need not do anything. */
5581 if (context == current_function_decl || context == inline_function_decl)
5582 return addr;
5584 fp = find_function_data (context);
5586 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5587 addr = XEXP (XEXP (addr, 0), 0);
5589 /* Decode given address as base reg plus displacement. */
5590 if (GET_CODE (addr) == REG)
5591 basereg = addr, displacement = 0;
5592 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5593 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5594 else
5595 abort ();
5597 /* We accept vars reached via the containing function's
5598 incoming arg pointer and via its stack variables pointer. */
5599 if (basereg == fp->internal_arg_pointer)
5601 /* If reached via arg pointer, get the arg pointer value
5602 out of that function's stack frame.
5604 There are two cases: If a separate ap is needed, allocate a
5605 slot in the outer function for it and dereference it that way.
5606 This is correct even if the real ap is actually a pseudo.
5607 Otherwise, just adjust the offset from the frame pointer to
5608 compensate. */
5610 #ifdef NEED_SEPARATE_AP
5611 rtx addr;
5613 addr = get_arg_pointer_save_area (fp);
5614 addr = fix_lexical_addr (XEXP (addr, 0), var);
5615 addr = memory_address (Pmode, addr);
5617 base = gen_rtx_MEM (Pmode, addr);
5618 set_mem_alias_set (base, get_frame_alias_set ());
5619 base = copy_to_reg (base);
5620 #else
5621 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5622 base = lookup_static_chain (var);
5623 #endif
5626 else if (basereg == virtual_stack_vars_rtx)
5628 /* This is the same code as lookup_static_chain, duplicated here to
5629 avoid an extra call to decl_function_context. */
5630 tree link;
5632 for (link = context_display; link; link = TREE_CHAIN (link))
5633 if (TREE_PURPOSE (link) == context)
5635 base = RTL_EXPR_RTL (TREE_VALUE (link));
5636 break;
5640 if (base == 0)
5641 abort ();
5643 /* Use same offset, relative to appropriate static chain or argument
5644 pointer. */
5645 return plus_constant (base, displacement);
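/* Editorial example of the decode-and-rebase above, with hypothetical
   RTL: if ADDR is (plus (reg fp) (const_int -20)) in the containing
   function, then BASEREG is that frame pointer, DISPLACEMENT is -20,
   and the value returned is (plus BASE (const_int -20)), where BASE is
   this function's copy of the outer frame pointer obtained via the
   static chain.  */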
5648 /* Return the address of the trampoline for entering nested fn FUNCTION.
5649 If necessary, allocate a trampoline (in the stack frame)
5650 and emit rtl to initialize its contents (at entry to this function). */
5652 rtx
5653 trampoline_address (function)
5654 tree function;
5656 tree link;
5657 tree rtlexp;
5658 rtx tramp;
5659 struct function *fp;
5660 tree fn_context;
5662 /* Find an existing trampoline and return it. */
5663 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5664 if (TREE_PURPOSE (link) == function)
5665 return
5666 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5668 for (fp = outer_function_chain; fp; fp = fp->outer)
5669 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5670 if (TREE_PURPOSE (link) == function)
5672 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5673 function);
5674 return adjust_trampoline_addr (tramp);
5677 /* None exists; we must make one. */
5679 /* Find the `struct function' for the function containing FUNCTION. */
5680 fp = 0;
5681 fn_context = decl_function_context (function);
5682 if (fn_context != current_function_decl
5683 && fn_context != inline_function_decl)
5684 fp = find_function_data (fn_context);
5686 /* Allocate run-time space for this trampoline
5687 (usually in the defining function's stack frame). */
5688 #ifdef ALLOCATE_TRAMPOLINE
5689 tramp = ALLOCATE_TRAMPOLINE (fp);
5690 #else
5691 /* If rounding is needed, allocate extra space
5692 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5693 #ifdef TRAMPOLINE_ALIGNMENT
5694 #define TRAMPOLINE_REAL_SIZE \
5695 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5696 #else
5697 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5698 #endif
5699 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5700 fp ? fp : cfun);
5701 #endif
5703 /* Record the trampoline for reuse and note it for later initialization
5704 by expand_function_end. */
5705 if (fp != 0)
5707 rtlexp = make_node (RTL_EXPR);
5708 RTL_EXPR_RTL (rtlexp) = tramp;
5709 fp->x_trampoline_list = tree_cons (function, rtlexp,
5710 fp->x_trampoline_list);
5712 else
5714 /* Make the RTL_EXPR node temporary, not momentary, so that the
5715 trampoline_list doesn't become garbage. */
5716 rtlexp = make_node (RTL_EXPR);
5718 RTL_EXPR_RTL (rtlexp) = tramp;
5719 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5722 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5723 return adjust_trampoline_addr (tramp);
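/* Editorial sketch of the memoization idiom used above, in generic C
   (the names below are illustrative, not GCC internals):

     struct cache { void *key, *val; struct cache *next; };

     static void *
     lookup_or_make (struct cache **list, void *key, void *(*make) (void))
     {
       struct cache *p;
       for (p = *list; p; p = p->next)
         if (p->key == key)
           return p->val;            <- reuse the existing entry
       p = (struct cache *) xmalloc (sizeof *p);
       p->key = key, p->val = make (), p->next = *list;
       *list = p;                    <- record for later lookups
       return p->val;
     }

   trampoline_address does the same with TREE_LISTs keyed by FUNCTION,
   checking the current frame and then enclosing frames before
   allocating a new trampoline.  */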
5726 /* Given a trampoline address,
5727 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5729 static rtx
5730 round_trampoline_addr (tramp)
5731 rtx tramp;
5733 #ifdef TRAMPOLINE_ALIGNMENT
5734 /* Round address up to desired boundary. */
5735 rtx temp = gen_reg_rtx (Pmode);
5736 rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5737 rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5739 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5740 temp, 0, OPTAB_LIB_WIDEN);
5741 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5742 temp, 0, OPTAB_LIB_WIDEN);
5743 #endif
5744 return tramp;
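/* The PLUS/AND pair above is the standard align-up sequence.  With
   assumed values TRAMPOLINE_ALIGNMENT == 64 and BITS_PER_UNIT == 8:

     addend == 64/8 - 1 == 7,  mask == -(64/8) == -8
     tramp == 0x1003  ->  (0x1003 + 7) & -8 == 0x1008

   i.e. the address is rounded up to the next 8-byte boundary.  */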
5747 /* Given a trampoline address, round it, then apply any
5748 platform-specific adjustments so that the result can be used for a
5749 function call. */
5751 static rtx
5752 adjust_trampoline_addr (tramp)
5753 rtx tramp;
5755 tramp = round_trampoline_addr (tramp);
5756 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5757 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5758 #endif
5759 return tramp;
5762 /* Walk all this function's BLOCK nodes, including those chained
5763 onto the first block, and store in each NOTE for the beginning or
5764 end of a block the BLOCK it describes.  The block tree is taken
5765 from DECL_INITIAL of the current function and the notes from its
5766 insn chain. */
5769 void
5770 identify_blocks ()
5772 int n_blocks;
5773 tree *block_vector, *last_block_vector;
5774 tree *block_stack;
5775 tree block = DECL_INITIAL (current_function_decl);
5777 if (block == 0)
5778 return;
5780 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5781 depth-first order. */
5782 block_vector = get_block_vector (block, &n_blocks);
5783 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5785 last_block_vector = identify_blocks_1 (get_insns (),
5786 block_vector + 1,
5787 block_vector + n_blocks,
5788 block_stack);
5790 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5791 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5792 if (0 && last_block_vector != block_vector + n_blocks)
5793 abort ();
5795 free (block_vector);
5796 free (block_stack);
5799 /* Subroutine of identify_blocks. Do the block substitution on the
5800 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5802 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5803 BLOCK_VECTOR is incremented for each block seen. */
5805 static tree *
5806 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5807 rtx insns;
5808 tree *block_vector;
5809 tree *end_block_vector;
5810 tree *orig_block_stack;
5812 rtx insn;
5813 tree *block_stack = orig_block_stack;
5815 for (insn = insns; insn; insn = NEXT_INSN (insn))
5817 if (GET_CODE (insn) == NOTE)
5819 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5821 tree b;
5823 /* If there are more block notes than BLOCKs, something
5824 is badly wrong. */
5825 if (block_vector == end_block_vector)
5826 abort ();
5828 b = *block_vector++;
5829 NOTE_BLOCK (insn) = b;
5830 *block_stack++ = b;
5832 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5834 /* If there are more NOTE_INSN_BLOCK_ENDs than
5835 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5836 if (block_stack == orig_block_stack)
5837 abort ();
5839 NOTE_BLOCK (insn) = *--block_stack;
5842 else if (GET_CODE (insn) == CALL_INSN
5843 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5845 rtx cp = PATTERN (insn);
5847 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5848 end_block_vector, block_stack);
5849 if (XEXP (cp, 1))
5850 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5851 end_block_vector, block_stack);
5852 if (XEXP (cp, 2))
5853 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5854 end_block_vector, block_stack);
5858 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5859 something is badly wrong. */
5860 if (block_stack != orig_block_stack)
5861 abort ();
5863 return block_vector;
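/* Editorial illustration of the pairing done above.  For a note
   stream

     BEG a   BEG b   END   BEG c   END   END

   the vector is consumed in depth-first preorder (a, b, c) at each
   NOTE_INSN_BLOCK_BEG, while the stack matches each
   NOTE_INSN_BLOCK_END with the innermost open block, so the three ENDs
   above resolve to b, c and a respectively.  */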
5866 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5867 and create duplicate blocks. */
5868 /* ??? Need an option to either create block fragments or to create
5869 abstract origin duplicates of a source block. It really depends
5870 on what optimization has been performed. */
5872 void
5873 reorder_blocks ()
5875 tree block = DECL_INITIAL (current_function_decl);
5876 varray_type block_stack;
5878 if (block == NULL_TREE)
5879 return;
5881 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5883 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
5884 reorder_blocks_0 (block);
5886 /* Prune the old trees away, so that they don't get in the way. */
5887 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5888 BLOCK_CHAIN (block) = NULL_TREE;
5890 /* Recreate the block tree from the note nesting. */
5891 reorder_blocks_1 (get_insns (), block, &block_stack);
5892 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5894 /* Remove deleted blocks from the block fragment chains. */
5895 reorder_fix_fragments (block);
5898 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
5900 static void
5901 reorder_blocks_0 (block)
5902 tree block;
5904 while (block)
5906 TREE_ASM_WRITTEN (block) = 0;
5907 reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5908 block = BLOCK_CHAIN (block);
5912 static void
5913 reorder_blocks_1 (insns, current_block, p_block_stack)
5914 rtx insns;
5915 tree current_block;
5916 varray_type *p_block_stack;
5918 rtx insn;
5920 for (insn = insns; insn; insn = NEXT_INSN (insn))
5922 if (GET_CODE (insn) == NOTE)
5924 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5926 tree block = NOTE_BLOCK (insn);
5928 /* If we have seen this block before, that means it now
5929 spans multiple address regions. Create a new fragment. */
5930 if (TREE_ASM_WRITTEN (block))
5932 tree new_block = copy_node (block);
5933 tree origin;
5935 origin = (BLOCK_FRAGMENT_ORIGIN (block)
5936 ? BLOCK_FRAGMENT_ORIGIN (block)
5937 : block);
5938 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
5939 BLOCK_FRAGMENT_CHAIN (new_block)
5940 = BLOCK_FRAGMENT_CHAIN (origin);
5941 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
5943 NOTE_BLOCK (insn) = new_block;
5944 block = new_block;
5947 BLOCK_SUBBLOCKS (block) = 0;
5948 TREE_ASM_WRITTEN (block) = 1;
5949 BLOCK_SUPERCONTEXT (block) = current_block;
5950 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5951 BLOCK_SUBBLOCKS (current_block) = block;
5952 current_block = block;
5953 VARRAY_PUSH_TREE (*p_block_stack, block);
5955 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5957 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5958 VARRAY_POP (*p_block_stack);
5959 BLOCK_SUBBLOCKS (current_block)
5960 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5961 current_block = BLOCK_SUPERCONTEXT (current_block);
5964 else if (GET_CODE (insn) == CALL_INSN
5965 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5967 rtx cp = PATTERN (insn);
5968 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5969 if (XEXP (cp, 1))
5970 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5971 if (XEXP (cp, 2))
5972 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
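/* Editorial sketch of the fragment bookkeeping above: when block B is
   entered a second time, a copy B2 is made with

     BLOCK_FRAGMENT_ORIGIN (B2) == B
     BLOCK_FRAGMENT_CHAIN (B)   == B2

   and the note is redirected to B2.  A third entry creates B3 and
   pushes it at the head of the chain, giving B -> B3 -> B2.  */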
5977 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
5978 appears in the block tree, select one of the fragments to become
5979 the new origin block. */
5981 static void
5982 reorder_fix_fragments (block)
5983 tree block;
5985 while (block)
5987 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
5988 tree new_origin = NULL_TREE;
5990 if (dup_origin)
5992 if (! TREE_ASM_WRITTEN (dup_origin))
5994 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
5996 /* Find the first of the remaining fragments. There must
5997 be at least one -- the current block. */
5998 while (! TREE_ASM_WRITTEN (new_origin))
5999 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6000 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6003 else if (! dup_origin)
6004 new_origin = block;
6006 /* Re-root the rest of the fragments to the new origin. In the
6007 case that DUP_ORIGIN was null, that means BLOCK was the origin
6008 of a chain of fragments and we want to remove those fragments
6009 that didn't make it to the output. */
6010 if (new_origin)
6012 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6013 tree chain = *pp;
6015 while (chain)
6017 if (TREE_ASM_WRITTEN (chain))
6019 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6020 *pp = chain;
6021 pp = &BLOCK_FRAGMENT_CHAIN (chain);
6023 chain = BLOCK_FRAGMENT_CHAIN (chain);
6025 *pp = NULL_TREE;
6028 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6029 block = BLOCK_CHAIN (block);
6033 /* Reverse the order of elements in the chain T of blocks,
6034 and return the new head of the chain (old last element). */
6036 static tree
6037 blocks_nreverse (t)
6038 tree t;
6040 tree prev = 0, decl, next;
6041 for (decl = t; decl; decl = next)
6043 next = BLOCK_CHAIN (decl);
6044 BLOCK_CHAIN (decl) = prev;
6045 prev = decl;
6047 return prev;
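/* This is the classic in-place list reversal: PREV trails the node
   being visited, so a -> b -> c becomes c -> b -> a in one pass with
   O(1) extra space.  */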
6050 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
6051 non-NULL, list them all into VECTOR, in a depth-first preorder
6052 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
6053 blocks. */
6055 static int
6056 all_blocks (block, vector)
6057 tree block;
6058 tree *vector;
6060 int n_blocks = 0;
6062 while (block)
6064 TREE_ASM_WRITTEN (block) = 0;
6066 /* Record this block. */
6067 if (vector)
6068 vector[n_blocks] = block;
6070 ++n_blocks;
6072 /* Record the subblocks, and their subblocks... */
6073 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6074 vector ? vector + n_blocks : 0);
6075 block = BLOCK_CHAIN (block);
6078 return n_blocks;
6081 /* Return a vector containing all the blocks rooted at BLOCK. The
6082 number of elements in the vector is stored in N_BLOCKS_P. The
6083 vector is dynamically allocated; it is the caller's responsibility
6084 to call `free' on the pointer returned. */
6086 static tree *
6087 get_block_vector (block, n_blocks_p)
6088 tree block;
6089 int *n_blocks_p;
6091 tree *block_vector;
6093 *n_blocks_p = all_blocks (block, NULL);
6094 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6095 all_blocks (block, block_vector);
6097 return block_vector;
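/* Editorial usage sketch of the count-then-fill pattern above:

     int n;
     tree *vec = get_block_vector (block, &n);
     ... vec[0] through vec[n-1] hold the blocks in depth-first
     preorder ...
     free (vec);

   identify_blocks above and number_blocks below follow exactly this
   allocate/use/free discipline.  */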
6100 static int next_block_index = 2;
6102 /* Set BLOCK_NUMBER for all the blocks in FN. */
6104 void
6105 number_blocks (fn)
6106 tree fn;
6108 int i;
6109 int n_blocks;
6110 tree *block_vector;
6112 /* For SDB and XCOFF debugging output, we start numbering the blocks
6113 from 1 within each function, rather than keeping a running
6114 count. */
6115 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6116 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6117 next_block_index = 1;
6118 #endif
6120 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6122 /* The top-level BLOCK isn't numbered at all. */
6123 for (i = 1; i < n_blocks; ++i)
6124 /* We number the blocks from two. */
6125 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6127 free (block_vector);
6129 return;
6132 /* If VAR is present in a subblock of BLOCK, return the subblock. */
6134 tree
6135 debug_find_var_in_block_tree (var, block)
6136 tree var;
6137 tree block;
6139 tree t;
6141 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6142 if (t == var)
6143 return block;
6145 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6147 tree ret = debug_find_var_in_block_tree (var, t);
6148 if (ret)
6149 return ret;
6152 return NULL_TREE;
6155 /* Allocate a function structure and reset its contents to the defaults. */
6157 static void
6158 prepare_function_start ()
6160 cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6162 init_stmt_for_function ();
6163 init_eh_for_function ();
6165 cse_not_expected = ! optimize;
6167 /* Caller save not needed yet. */
6168 caller_save_needed = 0;
6170 /* No stack slots have been made yet. */
6171 stack_slot_list = 0;
6173 current_function_has_nonlocal_label = 0;
6174 current_function_has_nonlocal_goto = 0;
6176 /* There is no stack slot for handling nonlocal gotos. */
6177 nonlocal_goto_handler_slots = 0;
6178 nonlocal_goto_stack_level = 0;
6180 /* No labels have been declared for nonlocal use. */
6181 nonlocal_labels = 0;
6182 nonlocal_goto_handler_labels = 0;
6184 /* No function calls so far in this function. */
6185 function_call_count = 0;
6187 /* No parm regs have been allocated.
6188 (This is important for output_inline_function.) */
6189 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6191 /* Initialize the RTL mechanism. */
6192 init_emit ();
6194 /* Initialize the queue of pending postincrements and postdecrements,
6195 and some other info in expr.c. */
6196 init_expr ();
6198 /* We haven't done register allocation yet. */
6199 reg_renumber = 0;
6201 init_varasm_status (cfun);
6203 /* Clear out data used for inlining. */
6204 cfun->inlinable = 0;
6205 cfun->original_decl_initial = 0;
6206 cfun->original_arg_vector = 0;
6208 cfun->stack_alignment_needed = STACK_BOUNDARY;
6209 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6211 /* Set if a call to setjmp is seen. */
6212 current_function_calls_setjmp = 0;
6214 /* Set if a call to longjmp is seen. */
6215 current_function_calls_longjmp = 0;
6217 current_function_calls_alloca = 0;
6218 current_function_contains_functions = 0;
6219 current_function_is_leaf = 0;
6220 current_function_nothrow = 0;
6221 current_function_sp_is_unchanging = 0;
6222 current_function_uses_only_leaf_regs = 0;
6223 current_function_has_computed_jump = 0;
6224 current_function_is_thunk = 0;
6226 current_function_returns_pcc_struct = 0;
6227 current_function_returns_struct = 0;
6228 current_function_epilogue_delay_list = 0;
6229 current_function_uses_const_pool = 0;
6230 current_function_uses_pic_offset_table = 0;
6231 current_function_cannot_inline = 0;
6233 /* We have not yet needed to make a label to jump to for tail-recursion. */
6234 tail_recursion_label = 0;
6236 /* We haven't had a need to make a save area for ap yet. */
6237 arg_pointer_save_area = 0;
6239 /* No stack slots allocated yet. */
6240 frame_offset = 0;
6242 /* No SAVE_EXPRs in this function yet. */
6243 save_expr_regs = 0;
6245 /* No RTL_EXPRs in this function yet. */
6246 rtl_expr_chain = 0;
6248 /* Set up to allocate temporaries. */
6249 init_temp_slots ();
6251 /* Indicate that we need to distinguish between the return value of the
6252 present function and the return value of a function being called. */
6253 rtx_equal_function_value_matters = 1;
6255 /* Indicate that we have not instantiated virtual registers yet. */
6256 virtuals_instantiated = 0;
6258 /* Indicate that we want CONCATs now. */
6259 generating_concat_p = 1;
6261 /* Indicate we have no need of a frame pointer yet. */
6262 frame_pointer_needed = 0;
6264 /* By default assume not stdarg. */
6265 current_function_stdarg = 0;
6267 /* We haven't made any trampolines for this function yet. */
6268 trampoline_list = 0;
6270 init_pending_stack_adjust ();
6271 inhibit_defer_pop = 0;
6273 current_function_outgoing_args_size = 0;
6275 current_function_funcdef_no = funcdef_no++;
6277 cfun->arc_profile = profile_arc_flag || flag_test_coverage;
6281 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6283 (*lang_hooks.function.init) (cfun);
6284 if (init_machine_status)
6285 cfun->machine = (*init_machine_status) ();
6288 /* Initialize the rtl expansion mechanism so that we can do simple things
6289 like generate sequences. This is used to provide a context during global
6290 initialization of some passes. */
6291 void
6292 init_dummy_function_start ()
6294 prepare_function_start ();
6297 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6298 and initialize static variables for generating RTL for the statements
6299 of the function. */
6301 void
6302 init_function_start (subr, filename, line)
6303 tree subr;
6304 const char *filename;
6305 int line;
6307 prepare_function_start ();
6309 current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
6310 cfun->decl = subr;
6312 /* Nonzero if this is a nested function that uses a static chain. */
6314 current_function_needs_context
6315 = (decl_function_context (current_function_decl) != 0
6316 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6318 /* Within function body, compute a type's size as soon as it is laid out. */
6319 immediate_size_expand++;
6321 /* Prevent ever trying to delete the first instruction of a function.
6322 Also tell final how to output a linenum before the function prologue.
6323 Note linenums could be missing, e.g. when compiling a Java .class file. */
6324 if (line > 0)
6325 emit_line_note (filename, line);
6327 /* Make sure first insn is a note even if we don't want linenums.
6328 This makes sure the first insn will never be deleted.
6329 Also, final expects a note to appear there. */
6330 emit_note (NULL, NOTE_INSN_DELETED);
6332 /* Set flags used by final.c. */
6333 if (aggregate_value_p (DECL_RESULT (subr)))
6335 #ifdef PCC_STATIC_STRUCT_RETURN
6336 current_function_returns_pcc_struct = 1;
6337 #endif
6338 current_function_returns_struct = 1;
6341 /* Warn if this value is an aggregate type,
6342 regardless of which calling convention we are using for it. */
6343 if (warn_aggregate_return
6344 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6345 warning ("function returns an aggregate");
6347 current_function_returns_pointer
6348 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6351 /* Make sure all values used by the optimization passes have sane
6352 defaults. */
6353 void
6354 init_function_for_compilation ()
6356 reg_renumber = 0;
6358 /* No prologue/epilogue insns yet. */
6359 VARRAY_GROW (prologue, 0);
6360 VARRAY_GROW (epilogue, 0);
6361 VARRAY_GROW (sibcall_epilogue, 0);
6364 /* Expand a call to __main at the beginning of a possible main function. */
6366 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6367 #undef HAS_INIT_SECTION
6368 #define HAS_INIT_SECTION
6369 #endif
6371 void
6372 expand_main_function ()
6374 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6375 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6377 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6378 rtx tmp, seq;
6380 start_sequence ();
6381 /* Forcibly align the stack. */
6382 #ifdef STACK_GROWS_DOWNWARD
6383 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
6384 stack_pointer_rtx, 1, OPTAB_WIDEN);
6385 #else
6386 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6387 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6388 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6389 stack_pointer_rtx, 1, OPTAB_WIDEN);
6390 #endif
6391 if (tmp != stack_pointer_rtx)
6392 emit_move_insn (stack_pointer_rtx, tmp);
6394 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
6395 tmp = force_reg (Pmode, const0_rtx);
6396 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6397 seq = get_insns ();
6398 end_sequence ();
6400 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6401 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6402 break;
6403 if (tmp)
6404 emit_insn_before (seq, tmp);
6405 else
6406 emit_insn (seq);
6408 #endif
6410 #ifndef HAS_INIT_SECTION
6411 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6412 VOIDmode, 0);
6413 #endif
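/* Editorial arithmetic sketch, with the assumed value
   PREFERRED_STACK_BOUNDARY == 128, so align == 16.  When the stack
   grows downward, sp &= -16 moves sp to the 16-byte boundary at or
   below it (0x7fff1238 -> 0x7fff1230); when it grows upward, the
   PLUS/AND pair rounds up instead: (0x1238 + 15) & -16 == 0x1240.  */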
6416 extern struct obstack permanent_obstack;
6418 /* The PENDING_SIZES represent the sizes of variable-sized types.
6419 Create RTL for the various sizes now (using temporary variables),
6420 so that we can refer to the sizes from the RTL we are generating
6421 for the current function. The PENDING_SIZES are a TREE_LIST. The
6422 TREE_VALUE of each node is a SAVE_EXPR. */
6424 void
6425 expand_pending_sizes (pending_sizes)
6426 tree pending_sizes;
6428 tree tem;
6430 /* Evaluate now the sizes of any types declared among the arguments. */
6431 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6433 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6434 /* Flush the queue in case this parameter declaration has
6435 side-effects. */
6436 emit_queue ();
6440 /* Start the RTL for a new function, and set variables used for
6441 emitting RTL.
6442 SUBR is the FUNCTION_DECL node.
6443 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6444 the function's parameters, which must be run at any return statement. */
6446 void
6447 expand_function_start (subr, parms_have_cleanups)
6448 tree subr;
6449 int parms_have_cleanups;
6451 tree tem;
6452 rtx last_ptr = NULL_RTX;
6454 /* Make sure volatile mem refs aren't considered
6455 valid operands of arithmetic insns. */
6456 init_recog_no_volatile ();
6458 current_function_instrument_entry_exit
6459 = (flag_instrument_function_entry_exit
6460 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6462 current_function_profile
6463 = (profile_flag
6464 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6466 current_function_limit_stack
6467 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6469 /* If function gets a static chain arg, store it in the stack frame.
6470 Do this first, so it gets the first stack slot offset. */
6471 if (current_function_needs_context)
6473 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6475 /* Delay copying static chain if it is not a register to avoid
6476 conflicts with regs used for parameters. */
6477 if (! SMALL_REGISTER_CLASSES
6478 || GET_CODE (static_chain_incoming_rtx) == REG)
6479 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6482 /* If the parameters of this function need cleaning up, get a label
6483 for the beginning of the code which executes those cleanups. This must
6484 be done before doing anything with return_label. */
6485 if (parms_have_cleanups)
6486 cleanup_label = gen_label_rtx ();
6487 else
6488 cleanup_label = 0;
6490 /* Make the label for return statements to jump to. Do not special
6491 case machines with special return instructions -- they will be
6492 handled later during jump, ifcvt, or epilogue creation. */
6493 return_label = gen_label_rtx ();
6495 /* Initialize rtx used to return the value. */
6496 /* Do this before assign_parms so that we copy the struct value address
6497 before any library calls that assign parms might generate. */
6499 /* Decide whether to return the value in memory or in a register. */
6500 if (aggregate_value_p (DECL_RESULT (subr)))
6502 /* Returning something that won't go in a register. */
6503 rtx value_address = 0;
6505 #ifdef PCC_STATIC_STRUCT_RETURN
6506 if (current_function_returns_pcc_struct)
6508 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6509 value_address = assemble_static_space (size);
6511 else
6512 #endif
6514 /* Expect to be passed the address of a place to store the value.
6515 If it is passed as an argument, assign_parms will take care of
6516 it. */
6517 if (struct_value_incoming_rtx)
6519 value_address = gen_reg_rtx (Pmode);
6520 emit_move_insn (value_address, struct_value_incoming_rtx);
6523 if (value_address)
6525 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6526 set_mem_attributes (x, DECL_RESULT (subr), 1);
6527 SET_DECL_RTL (DECL_RESULT (subr), x);
6530 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6531 /* If return mode is void, this decl rtl should not be used. */
6532 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6533 else
6535 /* Compute the return values into a pseudo reg, which we will copy
6536 into the true return register after the cleanups are done. */
6538 /* In order to figure out what mode to use for the pseudo, we
6539 figure out what the mode of the eventual return register will
6540 actually be, and use that. */
6541 rtx hard_reg
6542 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6543 subr, 1);
6545 /* Structures that are returned in registers are not aggregate_value_p,
6546 so we may see a PARALLEL. Don't play pseudo games with this. */
6547 if (! REG_P (hard_reg))
6548 SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
6549 else
6551 /* Create the pseudo. */
6552 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6554 /* Needed because we may need to move this to memory
6555 in case it's a named return value whose address is taken. */
6556 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6560 /* Initialize rtx for parameters and local variables.
6561 In some cases this requires emitting insns. */
6563 assign_parms (subr);
6565 /* Copy the static chain now if it wasn't a register. The delay is to
6566 avoid conflicts with the parameter passing registers. */
6568 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6569 if (GET_CODE (static_chain_incoming_rtx) != REG)
6570 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6572 /* The following was moved from init_function_start.
6573 The move is supposed to make sdb output more accurate. */
6574 /* Indicate the beginning of the function body,
6575 as opposed to parm setup. */
6576 emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6578 if (GET_CODE (get_last_insn ()) != NOTE)
6579 emit_note (NULL, NOTE_INSN_DELETED);
6580 parm_birth_insn = get_last_insn ();
6582 context_display = 0;
6583 if (current_function_needs_context)
6585 /* Fetch static chain values for containing functions. */
6586 tem = decl_function_context (current_function_decl);
6587 /* Copy the static chain pointer into a pseudo. If we have
6588 small register classes, copy the value from memory if
6589 static_chain_incoming_rtx is a REG. */
6590 if (tem)
6592 /* If the static chain originally came in a register, put it back
6593 there, then move it out in the next insn. The reason for
6594 this peculiar code is to satisfy function integration. */
6595 if (SMALL_REGISTER_CLASSES
6596 && GET_CODE (static_chain_incoming_rtx) == REG)
6597 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6598 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6601 while (tem)
6603 tree rtlexp = make_node (RTL_EXPR);
6605 RTL_EXPR_RTL (rtlexp) = last_ptr;
6606 context_display = tree_cons (tem, rtlexp, context_display);
6607 tem = decl_function_context (tem);
6608 if (tem == 0)
6609 break;
6610 /* Chain thru stack frames, assuming pointer to next lexical frame
6611 is found at the place we always store it. */
6612 #ifdef FRAME_GROWS_DOWNWARD
6613 last_ptr = plus_constant (last_ptr,
6614 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6615 #endif
6616 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6617 set_mem_alias_set (last_ptr, get_frame_alias_set ());
6618 last_ptr = copy_to_reg (last_ptr);
6620 /* If we are not optimizing, ensure that we know that this
6621 piece of context is live over the entire function. */
6622 if (! optimize)
6623 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6624 save_expr_regs);
6628 if (current_function_instrument_entry_exit)
6630 rtx fun = DECL_RTL (current_function_decl);
6631 if (GET_CODE (fun) == MEM)
6632 fun = XEXP (fun, 0);
6633 else
6634 abort ();
6635 emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6636 2, fun, Pmode,
6637 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6639 hard_frame_pointer_rtx),
6640 Pmode);
6643 if (current_function_profile)
6645 #ifdef PROFILE_HOOK
6646 PROFILE_HOOK (current_function_funcdef_no);
6647 #endif
6650 /* After the display initializations is where the tail-recursion label
6651 should go, if we end up needing one. Ensure we have a NOTE here
6652 since some things (like trampolines) get placed before this. */
6653 tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6655 /* Evaluate now the sizes of any types declared among the arguments. */
6656 expand_pending_sizes (nreverse (get_pending_sizes ()));
6658 /* Make sure there is a line number after the function entry setup code. */
6659 force_next_line_note ();
6662 /* Undo the effects of init_dummy_function_start. */
6663 void
6664 expand_dummy_function_end ()
6666 /* End any sequences that failed to be closed due to syntax errors. */
6667 while (in_sequence_p ())
6668 end_sequence ();
6670 /* Outside function body, can't compute type's actual size
6671 until next function's body starts. */
6673 free_after_parsing (cfun);
6674 free_after_compilation (cfun);
6675 cfun = 0;
6678 /* Call DOIT for each hard register used as a return value from
6679 the current function. */
6681 void
6682 diddle_return_value (doit, arg)
6683 void (*doit) PARAMS ((rtx, void *));
6684 void *arg;
6686 rtx outgoing = current_function_return_rtx;
6688 if (! outgoing)
6689 return;
6691 if (GET_CODE (outgoing) == REG)
6692 (*doit) (outgoing, arg);
6693 else if (GET_CODE (outgoing) == PARALLEL)
6695 int i;
6697 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6699 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6701 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6702 (*doit) (x, arg);
6707 static void
6708 do_clobber_return_reg (reg, arg)
6709 rtx reg;
6710 void *arg ATTRIBUTE_UNUSED;
6712 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6715 void
6716 clobber_return_register ()
6718 diddle_return_value (do_clobber_return_reg, NULL);
6720 /* In case we do use pseudo to return value, clobber it too. */
6721 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6723 tree decl_result = DECL_RESULT (current_function_decl);
6724 rtx decl_rtl = DECL_RTL (decl_result);
6725 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6727 do_clobber_return_reg (decl_rtl, NULL);
6732 static void
6733 do_use_return_reg (reg, arg)
6734 rtx reg;
6735 void *arg ATTRIBUTE_UNUSED;
6737 emit_insn (gen_rtx_USE (VOIDmode, reg));
6740 void
6741 use_return_register ()
6743 diddle_return_value (do_use_return_reg, NULL);
6746 static GTY(()) rtx initial_trampoline;
6748 /* Generate RTL for the end of the current function.
6749 FILENAME and LINE are the current position in the source file.
6751 It is up to language-specific callers to do cleanups for parameters--
6752 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6754 void
6755 expand_function_end (filename, line, end_bindings)
6756 const char *filename;
6757 int line;
6758 int end_bindings;
6760 tree link;
6761 rtx clobber_after;
6763 finish_expr_for_function ();
6765 /* If arg_pointer_save_area was referenced only from a nested
6766 function, we will not have initialized it yet. Do that now. */
6767 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6768 get_arg_pointer_save_area (cfun);
6770 #ifdef NON_SAVING_SETJMP
6771 /* Don't put any variables in registers if we call setjmp
6772 on a machine that fails to restore the registers. */
6773 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6775 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6776 setjmp_protect (DECL_INITIAL (current_function_decl));
6778 setjmp_protect_args ();
6780 #endif
6782 /* Initialize any trampolines required by this function. */
6783 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6785 tree function = TREE_PURPOSE (link);
6786 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6787 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6788 #ifdef TRAMPOLINE_TEMPLATE
6789 rtx blktramp;
6790 #endif
6791 rtx seq;
6793 #ifdef TRAMPOLINE_TEMPLATE
6794 /* First make sure this compilation has a template for
6795 initializing trampolines. */
6796 if (initial_trampoline == 0)
6798 initial_trampoline
6799 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6800 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6802 #endif
6804 /* Generate insns to initialize the trampoline. */
6805 start_sequence ();
6806 tramp = round_trampoline_addr (XEXP (tramp, 0));
6807 #ifdef TRAMPOLINE_TEMPLATE
6808 blktramp = replace_equiv_address (initial_trampoline, tramp);
6809 emit_block_move (blktramp, initial_trampoline,
6810 GEN_INT (TRAMPOLINE_SIZE));
6811 #endif
6812 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6813 seq = get_insns ();
6814 end_sequence ();
6816 /* Put those insns at entry to the containing function (this one). */
6817 emit_insn_before (seq, tail_recursion_reentry);
6820 /* If we are doing stack checking and this function makes calls,
6821 do a stack probe at the start of the function to ensure we have enough
6822 space for another stack frame. */
6823 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6825 rtx insn, seq;
6827 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6828 if (GET_CODE (insn) == CALL_INSN)
6830 start_sequence ();
6831 probe_stack_range (STACK_CHECK_PROTECT,
6832 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6833 seq = get_insns ();
6834 end_sequence ();
6835 emit_insn_before (seq, tail_recursion_reentry);
6836 break;
6840 /* Warn about unused parms if extra warnings were specified. */
6841 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6842 warning. WARN_UNUSED_PARAMETER is negative when set by
6843 -Wunused. */
6844 if (warn_unused_parameter > 0
6845 || (warn_unused_parameter < 0 && extra_warnings))
6847 tree decl;
6849 for (decl = DECL_ARGUMENTS (current_function_decl);
6850 decl; decl = TREE_CHAIN (decl))
6851 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6852 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6853 warning_with_decl (decl, "unused parameter `%s'");
6856 /* Delete handlers for nonlocal gotos if nothing uses them. */
6857 if (nonlocal_goto_handler_slots != 0
6858 && ! current_function_has_nonlocal_label)
6859 delete_handlers ();
6861 /* End any sequences that failed to be closed due to syntax errors. */
6862 while (in_sequence_p ())
6863 end_sequence ();
6865 /* Outside function body, can't compute type's actual size
6866 until next function's body starts. */
6867 immediate_size_expand--;
6869 clear_pending_stack_adjust ();
6870 do_pending_stack_adjust ();
6872 /* Mark the end of the function body.
6873 If control reaches this insn, the function can drop through
6874 without returning a value. */
6875 emit_note (NULL, NOTE_INSN_FUNCTION_END);
6877 /* Must mark the last line number note in the function, so that the test
6878 coverage code can avoid counting the last line twice. This just tells
6879 the code to ignore the immediately following line note, since there
6880 already exists a copy of this note somewhere above. This line number
6881 note is still needed for debugging though, so we can't delete it. */
6882 if (flag_test_coverage)
6883 emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6885 /* Output a linenumber for the end of the function.
6886 SDB depends on this. */
6887 emit_line_note_force (filename, line);
6889 /* Before the return label (if any), clobber the return
6890 registers so that they are not propagated live to the rest of
6891 the function. This can only happen with functions that drop
6892 through; if there had been a return statement, there would
6893 have either been a return rtx, or a jump to the return label.
6895 We delay the actual code generation until after current_function_return_rtx
6896 is computed. */
6897 clobber_after = get_last_insn ();
6899 /* Output the label for the actual return from the function,
6900 if one is expected. This happens either because a function epilogue
6901 is used instead of a return instruction, or because a return was done
6902 with a goto in order to run local cleanups, or because of pcc-style
6903 structure returning. */
6904 if (return_label)
6905 emit_label (return_label);
6907 /* C++ uses this. */
6908 if (end_bindings)
6909 expand_end_bindings (0, 0, 0);
6911 if (current_function_instrument_entry_exit)
6913 rtx fun = DECL_RTL (current_function_decl);
6914 if (GET_CODE (fun) == MEM)
6915 fun = XEXP (fun, 0);
6916 else
6917 abort ();
6918 emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6919 2, fun, Pmode,
6920 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6922 hard_frame_pointer_rtx),
6923 Pmode);
6926 /* Let except.c know where it should emit the call to unregister
6927 the function context for sjlj exceptions. */
6928 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6929 sjlj_emit_function_exit_after (get_last_insn ());
6931 /* If we had calls to alloca, and this machine needs
6932 an accurate stack pointer to exit the function,
6933 insert some code to save and restore the stack pointer. */
6934 #ifdef EXIT_IGNORE_STACK
6935 if (! EXIT_IGNORE_STACK)
6936 #endif
6937 if (current_function_calls_alloca)
6939 rtx tem = 0;
6941 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6942 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6945 /* If scalar return value was computed in a pseudo-reg, or was a named
6946 return value that got dumped to the stack, copy that to the hard
6947 return register. */
6948 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6950 tree decl_result = DECL_RESULT (current_function_decl);
6951 rtx decl_rtl = DECL_RTL (decl_result);
6953 if (REG_P (decl_rtl)
6954 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6955 : DECL_REGISTER (decl_result))
6957 rtx real_decl_rtl = current_function_return_rtx;
6959 /* This should be set in assign_parms. */
6960 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
6961 abort ();
6963 /* If this is a BLKmode structure being returned in registers,
6964 then use the mode computed in expand_return. Note that if
6965 decl_rtl is memory, then its mode may have been changed,
6966 but that current_function_return_rtx has not. */
6967 if (GET_MODE (real_decl_rtl) == BLKmode)
6968 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
6970 /* If a named return value dumped decl_result to memory, then
6971 we may need to re-do the PROMOTE_MODE signed/unsigned
6972 extension. */
6973 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6975 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6977 #ifdef PROMOTE_FUNCTION_RETURN
6978 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6979 &unsignedp, 1);
6980 #endif
6982 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6984 else if (GET_CODE (real_decl_rtl) == PARALLEL)
6985 emit_group_load (real_decl_rtl, decl_rtl,
6986 int_size_in_bytes (TREE_TYPE (decl_result)));
6987 else
6988 emit_move_insn (real_decl_rtl, decl_rtl);
6992 /* If returning a structure, arrange to return the address of the value
6993 in a place where debuggers expect to find it.
6995 If returning a structure PCC style,
6996 the caller also depends on this value.
6997 And current_function_returns_pcc_struct is not necessarily set. */
6998 if (current_function_returns_struct
6999 || current_function_returns_pcc_struct)
7001 rtx value_address
7002 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7003 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7004 #ifdef FUNCTION_OUTGOING_VALUE
7005 rtx outgoing
7006 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7007 current_function_decl);
7008 #else
7009 rtx outgoing
7010 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7011 #endif
7013 /* Mark this as a function return value so integrate will delete the
7014 assignment and USE below when inlining this function. */
7015 REG_FUNCTION_VALUE_P (outgoing) = 1;
7017 #ifdef POINTERS_EXTEND_UNSIGNED
7018 /* The address may be ptr_mode and OUTGOING may be Pmode. */
7019 if (GET_MODE (outgoing) != GET_MODE (value_address))
7020 value_address = convert_memory_address (GET_MODE (outgoing),
7021 value_address);
7022 #endif
7024 emit_move_insn (outgoing, value_address);
7026 /* Show return register used to hold result (in this case the address
7027 of the result). */
7028 current_function_return_rtx = outgoing;
7031 /* If this is an implementation of throw, do what's necessary to
7032 communicate between __builtin_eh_return and the epilogue. */
7033 expand_eh_return ();
7035 /* Emit the actual code to clobber return register. */
7037 rtx seq, after;
7039 start_sequence ();
7040 clobber_return_register ();
7041 seq = get_insns ();
7042 end_sequence ();
7044 after = emit_insn_after (seq, clobber_after);
7046 if (clobber_after != after)
7047 cfun->x_clobber_return_insn = after;
7050 /* ??? This should no longer be necessary since stupid is no longer with
7051 us, but there are some parts of the compiler (eg reload_combine, and
7052 sh mach_dep_reorg) that still try and compute their own lifetime info
7053 instead of using the general framework. */
7054 use_return_register ();
7056 /* Fix up any gotos that jumped out to the outermost
7057 binding level of the function.
7058 Must follow emitting RETURN_LABEL. */
7060 /* If you have any cleanups to do at this point,
7061 and they need to create temporary variables,
7062 then you will lose. */
7063 expand_fixups (get_insns ());
7066 rtx
7067 get_arg_pointer_save_area (f)
7068 struct function *f;
7070 rtx ret = f->x_arg_pointer_save_area;
7072 if (! ret)
7074 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7075 f->x_arg_pointer_save_area = ret;
7078 if (f == cfun && ! f->arg_pointer_save_area_init)
7080 rtx seq;
7082 /* Save the arg pointer at the beginning of the function. The
7083 generated stack slot may not be a valid memory address, so we
7084 have to check it and fix it if necessary. */
7085 start_sequence ();
7086 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7087 seq = get_insns ();
7088 end_sequence ();
7090 push_topmost_sequence ();
7091 emit_insn_after (seq, get_insns ());
7092 pop_topmost_sequence ();
7095 return ret;
7098 /* Extend a vector that records the INSN_UIDs of INSNS
7099 (a list of one or more insns). */
7101 static void
7102 record_insns (insns, vecp)
7103 rtx insns;
7104 varray_type *vecp;
7106 int i, len;
7107 rtx tmp;
7109 tmp = insns;
7110 len = 0;
7111 while (tmp != NULL_RTX)
7113 len++;
7114 tmp = NEXT_INSN (tmp);
7117 i = VARRAY_SIZE (*vecp);
7118 VARRAY_GROW (*vecp, i + len);
7119 tmp = insns;
7120 while (tmp != NULL_RTX)
7122 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
7123 i++;
7124 tmp = NEXT_INSN (tmp);
7128 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
7129 be running after reorg, SEQUENCE rtl is possible. */
7131 static int
7132 contains (insn, vec)
7133 rtx insn;
7134 varray_type vec;
7136 int i, j;
7138 if (GET_CODE (insn) == INSN
7139 && GET_CODE (PATTERN (insn)) == SEQUENCE)
7141 int count = 0;
7142 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7143 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7144 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7145 count++;
7146 return count;
7148 else
7150 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7151 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7152 return 1;
7154 return 0;
7157 int
7158 prologue_epilogue_contains (insn)
7159 rtx insn;
7161 if (contains (insn, prologue))
7162 return 1;
7163 if (contains (insn, epilogue))
7164 return 1;
7165 return 0;
7168 int
7169 sibcall_epilogue_contains (insn)
7170 rtx insn;
7172 if (sibcall_epilogue)
7173 return contains (insn, sibcall_epilogue);
7174 return 0;
7177 #ifdef HAVE_return
7178 /* Insert gen_return at the end of block BB. This also means updating
7179 block_for_insn appropriately. */
7181 static void
7182 emit_return_into_block (bb, line_note)
7183 basic_block bb;
7184 rtx line_note;
7186 rtx p, end;
7188 p = NEXT_INSN (bb->end);
7189 end = emit_jump_insn_after (gen_return (), bb->end);
7190 if (line_note)
7191 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7192 NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7194 #endif /* HAVE_return */
7196 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7198 /* These functions convert the epilogue into a variant that does not modify the
7199 stack pointer. This is used in cases where a function returns an object
7200 whose size is not known until it is computed. The called function leaves the
7201 object on the stack, leaves the stack depressed, and returns a pointer to
7202 the object.
7204 What we need to do is track all modifications and references to the stack
7205 pointer, deleting the modifications and changing the references to point to
7206 the location the stack pointer would have pointed to had the modifications
7207 taken place.
7209 These functions need to be portable so we need to make as few assumptions
7210 about the epilogue as we can. However, the epilogue basically contains
7211 three things: instructions to reset the stack pointer, instructions to
7212 reload registers, possibly including the frame pointer, and an
7213 instruction to return to the caller.
7215 If we can't be sure of what a relevant epilogue insn is doing, we abort.
7216 We also make no attempt to validate the insns we make since if they are
7217 invalid, we probably can't do anything valid. The intent is that these
7218 routines get "smarter" as more and more machines start to use them and
7219 they try operating on different epilogues.
7221 We use the following structure to track what the part of the epilogue that
7222 we've already processed has done. We keep two copies of the SP equivalence,
7223 one for use during the insn we are processing and one for use in the next
7224 insn. The difference is because one part of a PARALLEL may adjust SP
7225 and the other may use it. */
7227 struct epi_info
7229 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
7230 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
7231 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
7232 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
7233 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
7234 should be set to once we no longer need
7235 its value. */
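/* Editorial tracking example: after the epilogue insns

     (set (reg sp) (plus (reg fp) (const_int 16)))
     (set (reg sp) (plus (reg sp) (const_int 8)))

   the pass records sp_equiv_reg == fp and sp_offset == 24, so a later
   reference to sp can be rewritten as (plus (reg fp) (const_int 24))
   without the stack pointer ever being modified.  */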
7238 static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7239 static void emit_equiv_load PARAMS ((struct epi_info *));
7241 /* Modify INSNS, a list of one or more insns that is part of the epilogue,
7242 so that it makes no modifications to the stack pointer. Return the new list of insns. */
7244 static rtx
7245 keep_stack_depressed (insns)
7246 rtx insns;
7248 int j;
7249 struct epi_info info;
7250 rtx insn, next;
7252 /* If the epilogue is just a single instruction, it must be OK as is. */
7254 if (NEXT_INSN (insns) == NULL_RTX)
7255 return insns;
7257 /* Otherwise, start a sequence, initialize the information we have, and
7258 process all the insns we were given. */
7259 start_sequence ();
7261 info.sp_equiv_reg = stack_pointer_rtx;
7262 info.sp_offset = 0;
7263 info.equiv_reg_src = 0;
7265 insn = insns;
7266 next = NULL_RTX;
7267 while (insn != NULL_RTX)
7269 next = NEXT_INSN (insn);
7271 if (!INSN_P (insn))
7273 add_insn (insn);
7274 insn = next;
7275 continue;
7278 /* If this insn references the register that SP is equivalent to and
7279 we have a pending load to that register, we must force out the load
7280 first and then indicate we no longer know what SP's equivalent is. */
7281 if (info.equiv_reg_src != 0
7282 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7284 emit_equiv_load (&info);
7285 info.sp_equiv_reg = 0;
7288 info.new_sp_equiv_reg = info.sp_equiv_reg;
7289 info.new_sp_offset = info.sp_offset;
7291 /* If this is a (RETURN) and the return address is on the stack,
7292 update the address and change to an indirect jump. */
7293 if (GET_CODE (PATTERN (insn)) == RETURN
7294 || (GET_CODE (PATTERN (insn)) == PARALLEL
7295 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7297 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7298 rtx base = 0;
7299 HOST_WIDE_INT offset = 0;
7300 rtx jump_insn, jump_set;
7302 /* If the return address is in a register, we can emit the insn
7303 unchanged. Otherwise, it must be a MEM and we see what the
7304 base register and offset are. In any case, we have to emit any
7305 pending load to the equivalent reg of SP, if any. */
7306 if (GET_CODE (retaddr) == REG)
7308 emit_equiv_load (&info);
7309 add_insn (insn);
7310 insn = next;
7311 continue;
7313 else if (GET_CODE (retaddr) == MEM
7314 && GET_CODE (XEXP (retaddr, 0)) == REG)
7315 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7316 else if (GET_CODE (retaddr) == MEM
7317 && GET_CODE (XEXP (retaddr, 0)) == PLUS
7318 && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7319 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7321 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7322 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7324 else
7325 abort ();
7327 /* If the base of the location containing the return pointer
7328 is SP, we must update it with the replacement address. Otherwise,
7329 just build the necessary MEM. */
7330 retaddr = plus_constant (base, offset);
7331 if (base == stack_pointer_rtx)
7332 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7333 plus_constant (info.sp_equiv_reg,
7334 info.sp_offset));
7336 retaddr = gen_rtx_MEM (Pmode, retaddr);
7338 /* If there is a pending load to the equivalent register for SP
7339 and we reference that register, we must load our address into
7340 a scratch register and then do that load. */
7341 if (info.equiv_reg_src
7342 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7344 unsigned int regno;
7345 rtx reg;
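/* Search for a scratch register: it must be able to hold Pmode, must
   not be fixed, must be clobbered by calls (so nothing expects it to
   survive to the caller), must not be live at function exit, and must
   not appear in the pending load to SP's equivalent register.  */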
7347 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7348 if (HARD_REGNO_MODE_OK (regno, Pmode)
7349 && !fixed_regs[regno]
7350 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7351 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7352 regno)
7353 && !refers_to_regno_p (regno,
7354 regno + HARD_REGNO_NREGS (regno,
7355 Pmode),
7356 info.equiv_reg_src, NULL))
7357 break;
7359 if (regno == FIRST_PSEUDO_REGISTER)
7360 abort ();
7362 reg = gen_rtx_REG (Pmode, regno);
7363 emit_move_insn (reg, retaddr);
7364 retaddr = reg;
7367 emit_equiv_load (&info);
7368 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7370 /* Show that the SET in the above insn is a RETURN. */
7371 jump_set = single_set (jump_insn);
7372 if (jump_set == 0)
7373 abort ();
7374 else
7375 SET_IS_RETURN_P (jump_set) = 1;
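/* The net effect of this block, sketched with hypothetical registers: a
   plain (return) whose return address, after the SP substitution above,
   lives at (mem:SI (plus:SI (reg:SI fp) (const_int 4))) becomes

     (jump_insn (set (pc) (mem:SI (plus:SI (reg:SI fp) (const_int 4)))))

   with SET_IS_RETURN_P set on the SET so that later passes still
   recognize the indirect jump as a function return.  */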
7378 /* If SP is not mentioned in the pattern and its equivalent register, if
7379 any, is not modified, just emit it. Otherwise, if neither is set,
7380 replace the reference to SP and emit the insn. If none of those are
7381 true, handle each SET individually. */
7382 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7383 && (info.sp_equiv_reg == stack_pointer_rtx
7384 || !reg_set_p (info.sp_equiv_reg, insn)))
7385 add_insn (insn);
7386 else if (! reg_set_p (stack_pointer_rtx, insn)
7387 && (info.sp_equiv_reg == stack_pointer_rtx
7388 || !reg_set_p (info.sp_equiv_reg, insn)))
7390 if (! validate_replace_rtx (stack_pointer_rtx,
7391 plus_constant (info.sp_equiv_reg,
7392 info.sp_offset),
7393 insn))
7394 abort ();
7396 add_insn (insn);
7398 else if (GET_CODE (PATTERN (insn)) == SET)
7399 handle_epilogue_set (PATTERN (insn), &info);
7400 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7402 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7403 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7404 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7406 else
7407 add_insn (insn);
7409 info.sp_equiv_reg = info.new_sp_equiv_reg;
7410 info.sp_offset = info.new_sp_offset;
7412 insn = next;
7415 insns = get_insns ();
7416 end_sequence ();
7417 return insns;
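/* As a worked (hypothetical) example of the whole transformation,
   assuming the return address sits at (mem:SI (reg:SI sp)): an epilogue
   such as

     (set (reg:SI sp) (plus:SI (reg:SI fp) (const_int 8)))
     (return)

   must not pop the stack when the function returns with the stack
   depressed, so the SP assignment is only recorded (SP's equivalent
   becomes fp at offset 8) and the return is rewritten to fetch the
   return address through that equivalence instead:

     (jump_insn (set (pc) (mem:SI (plus:SI (reg:SI fp) (const_int 8)))))

   leaving the stack pointer itself untouched.  */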
7420 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
7421 structure that contains information about what we've seen so far. We
7422 process this SET by either updating that data or by emitting one or
7423 more insns. */
7425 static void
7426 handle_epilogue_set (set, p)
7427 rtx set;
7428 struct epi_info *p;
7430 /* First handle the case where we are setting SP. Record what it is being
7431 set from. If unknown, abort. */
7432 if (reg_set_p (stack_pointer_rtx, set))
7434 if (SET_DEST (set) != stack_pointer_rtx)
7435 abort ();
7437 if (GET_CODE (SET_SRC (set)) == PLUS
7438 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
7440 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
7441 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
7443 else
7444 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
7446 /* If we are adjusting SP, we adjust from the old data. */
7447 if (p->new_sp_equiv_reg == stack_pointer_rtx)
7449 p->new_sp_equiv_reg = p->sp_equiv_reg;
7450 p->new_sp_offset += p->sp_offset;
7453 if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
7454 abort ();
7456 return;
7459 /* Next handle the case where we are setting SP's equivalent register.
7460 If we already have a value to set it to, abort. We could update, but
7461 there seems little point in handling that case. Note that we have
7462 to allow for the case where we are setting the register set in
7463 the previous part of a PARALLEL inside a single insn. But use the
7464 old offset for any updates within this insn. */
7465 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
7467 if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
7468 || p->equiv_reg_src != 0)
7469 abort ();
7470 else
7471 p->equiv_reg_src
7472 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7473 plus_constant (p->sp_equiv_reg,
7474 p->sp_offset));
7477 /* Otherwise, replace any references to SP in the insn with its new value
7478 and emit the insn. */
7479 else
7481 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7482 plus_constant (p->sp_equiv_reg,
7483 p->sp_offset));
7484 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
7485 plus_constant (p->sp_equiv_reg,
7486 p->sp_offset));
7487 emit_insn (set);
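/* For instance (registers hypothetical), a first epilogue SET

     (set (reg:SI sp) (plus:SI (reg:SI fp) (const_int 8)))

   is not emitted; it only records fp as SP's equivalent at offset 8.
   A later

     (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int 4)))

   adjusts from the old data, bumping the recorded offset to 12, while
   any other SET merely has its SP references rewritten as above.  */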
7491 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
7493 static void
7494 emit_equiv_load (p)
7495 struct epi_info *p;
7497 if (p->equiv_reg_src != 0)
7498 emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
7500 p->equiv_reg_src = 0;
7502 #endif
7504 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7505 this into place with notes indicating where the prologue ends and where
7506 the epilogue begins. Update the basic block information when possible. */
7508 void
7509 thread_prologue_and_epilogue_insns (f)
7510 rtx f ATTRIBUTE_UNUSED;
7512 int inserted = 0;
7513 edge e;
7514 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7515 rtx seq;
7516 #endif
7517 #ifdef HAVE_prologue
7518 rtx prologue_end = NULL_RTX;
7519 #endif
7520 #if defined (HAVE_epilogue) || defined(HAVE_return)
7521 rtx epilogue_end = NULL_RTX;
7522 #endif
7524 #ifdef HAVE_prologue
7525 if (HAVE_prologue)
7527 start_sequence ();
7528 seq = gen_prologue ();
7529 emit_insn (seq);
7531 /* Retain a map of the prologue insns. */
7532 record_insns (seq, &prologue);
7533 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7535 seq = get_insns ();
7536 end_sequence ();
7538 /* Can't deal with multiple successors of the entry block
7539 at the moment. The function should always have exactly one
7540 entry point. */
7541 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7542 abort ();
7544 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7545 inserted = 1;
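/* Once commit_edge_insertions runs below, the function schematically
   begins

     (prologue insns)
     NOTE_INSN_PROLOGUE_END
     (first insns of the original body)  */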
7547 #endif
7549 /* If the exit block has no non-fake predecessors, we don't need
7550 an epilogue. */
7551 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7552 if ((e->flags & EDGE_FAKE) == 0)
7553 break;
7554 if (e == NULL)
7555 goto epilogue_done;
7557 #ifdef HAVE_return
7558 if (optimize && HAVE_return)
7560 /* If we're allowed to generate a simple return instruction,
7561 then by definition we don't need a full epilogue. Examine
7562 the block that falls through to EXIT. If it does not
7563 contain any code, examine its predecessors and try to
7564 emit (conditional) return instructions. */
7566 basic_block last;
7567 edge e_next;
7568 rtx label;
7570 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7571 if (e->flags & EDGE_FALLTHRU)
7572 break;
7573 if (e == NULL)
7574 goto epilogue_done;
7575 last = e->src;
7577 /* Verify that there are no active instructions in the last block. */
7578 label = last->end;
7579 while (label && GET_CODE (label) != CODE_LABEL)
7581 if (active_insn_p (label))
7582 break;
7583 label = PREV_INSN (label);
7586 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7588 rtx epilogue_line_note = NULL_RTX;
7590 /* Locate the line number associated with the closing brace,
7591 if we can find one. */
7592 for (seq = get_last_insn ();
7593 seq && ! active_insn_p (seq);
7594 seq = PREV_INSN (seq))
7595 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7597 epilogue_line_note = seq;
7598 break;
7601 for (e = last->pred; e; e = e_next)
7603 basic_block bb = e->src;
7604 rtx jump;
7606 e_next = e->pred_next;
7607 if (bb == ENTRY_BLOCK_PTR)
7608 continue;
7610 jump = bb->end;
7611 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7612 continue;
7614 /* If we have an unconditional jump, we can replace that
7615 with a simple return instruction. */
7616 if (simplejump_p (jump))
7618 emit_return_into_block (bb, epilogue_line_note);
7619 delete_insn (jump);
7622 /* If we have a conditional jump, we can try to replace
7623 that with a conditional return instruction. */
7624 else if (condjump_p (jump))
7626 rtx ret, *loc;
7628 ret = SET_SRC (PATTERN (jump));
7629 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7630 loc = &XEXP (ret, 1);
7631 else
7632 loc = &XEXP (ret, 2);
7633 ret = gen_rtx_RETURN (VOIDmode);
7635 if (! validate_change (jump, loc, ret, 0))
7636 continue;
7637 if (JUMP_LABEL (jump))
7638 LABEL_NUSES (JUMP_LABEL (jump))--;
7640 /* If this block has only one successor, it both jumps
7641 and falls through to the fallthru block, so we can't
7642 delete the edge. */
7643 if (bb->succ->succ_next == NULL)
7644 continue;
7646 else
7647 continue;
7649 /* Fix up the CFG for the successful change we just made. */
7650 redirect_edge_succ (e, EXIT_BLOCK_PTR);
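/* By way of illustration (condition, registers and label hypothetical),
   a conditional jump to the empty exit fallthru block such as

     (set (pc) (if_then_else (eq (reg:CC 17) (const_int 0))
                             (label_ref 42) (pc)))

   has its label_ref replaced by the validate_change call above,
   yielding a conditional return:

     (set (pc) (if_then_else (eq (reg:CC 17) (const_int 0))
                             (return) (pc)))  */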
7653 /* Emit a return insn for the exit fallthru block. Whether
7654 this is still reachable will be determined later. */
7656 emit_barrier_after (last->end);
7657 emit_return_into_block (last, epilogue_line_note);
7658 epilogue_end = last->end;
7659 last->succ->flags &= ~EDGE_FALLTHRU;
7660 goto epilogue_done;
7663 #endif
7664 #ifdef HAVE_epilogue
7665 if (HAVE_epilogue)
7667 /* Find the edge that falls through to EXIT. Other edges may exist
7668 due to RETURN instructions, but those don't need epilogues.
7669 There really shouldn't be a mixture -- either all should have
7670 been converted or none, however... */
7672 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7673 if (e->flags & EDGE_FALLTHRU)
7674 break;
7675 if (e == NULL)
7676 goto epilogue_done;
7678 start_sequence ();
7679 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7681 seq = gen_epilogue ();
7683 #ifdef INCOMING_RETURN_ADDR_RTX
7684 /* If this function returns with the stack depressed and we can support
7685 it, massage the epilogue to actually do that. */
7686 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7687 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7688 seq = keep_stack_depressed (seq);
7689 #endif
7691 emit_jump_insn (seq);
7693 /* Retain a map of the epilogue insns. */
7694 record_insns (seq, &epilogue);
7696 seq = get_insns ();
7697 end_sequence ();
7699 insert_insn_on_edge (seq, e);
7700 inserted = 1;
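/* The fallthru edge to EXIT thus schematically becomes

     (last insns of the function body)
     NOTE_INSN_EPILOGUE_BEG
     (epilogue insns, ending in the epilogue jump)  */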
7702 #endif
7703 epilogue_done:
7705 if (inserted)
7706 commit_edge_insertions ();
7708 #ifdef HAVE_sibcall_epilogue
7709 /* Emit sibling epilogues before any sibling call sites. */
7710 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7712 basic_block bb = e->src;
7713 rtx insn = bb->end;
7714 rtx i;
7715 rtx newinsn;
7717 if (GET_CODE (insn) != CALL_INSN
7718 || ! SIBLING_CALL_P (insn))
7719 continue;
7721 start_sequence ();
7722 emit_insn (gen_sibcall_epilogue ());
7723 seq = get_insns ();
7724 end_sequence ();
7726 /* Retain a map of the epilogue insns. Used in life analysis to
7727 avoid getting rid of sibcall epilogue insns. Do this before we
7728 actually emit the sequence. */
7729 record_insns (seq, &sibcall_epilogue);
7731 i = PREV_INSN (insn);
7732 newinsn = emit_insn_before (seq, insn);
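/* Schematically, each sibling call site goes from

     ...                           <- i (insn before the call)
     (call_insn ...)               <- insn, SIBLING_CALL_P

   to

     ...                           <- i
     (sibcall epilogue insns)      <- newinsn
     (call_insn ...)               <- insn  */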
7734 #endif
7736 #ifdef HAVE_prologue
7737 if (prologue_end)
7739 rtx insn, prev;
7741 /* GDB handles `break f' by setting a breakpoint on the first
7742 line note after the prologue. Which means (1) that if
7743 there are line number notes before where we inserted the
7744 prologue we should move them, and (2) we should generate a
7745 note before the end of the first basic block, if there isn't
7746 one already there.
7748 ??? This behaviour is completely broken when dealing with
7749 multiple entry functions. We simply always place the note
7750 into the first basic block and let alternate entry points
7751 be missed.
7754 for (insn = prologue_end; insn; insn = prev)
7756 prev = PREV_INSN (insn);
7757 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7759 /* Note that we cannot reorder the first insn in the
7760 chain, since rest_of_compilation relies on that
7761 remaining constant. */
7762 if (prev == NULL)
7763 break;
7764 reorder_insns (insn, insn, prologue_end);
7768 /* Find the last line number note in the first block. */
7769 for (insn = ENTRY_BLOCK_PTR->next_bb->end;
7770 insn != prologue_end && insn;
7771 insn = PREV_INSN (insn))
7772 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7773 break;
7775 /* If we didn't find one, make a copy of the first line number
7776 we run across. */
7777 if (! insn)
7779 for (insn = next_active_insn (prologue_end);
7780 insn;
7781 insn = PREV_INSN (insn))
7782 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7784 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7785 NOTE_LINE_NUMBER (insn),
7786 prologue_end);
7787 break;
7791 #endif
7792 #ifdef HAVE_epilogue
7793 if (epilogue_end)
7795 rtx insn, next;
7797 /* Similarly, move any line notes that appear after the epilogue.
7798 There is no need, however, to be quite so particular about the existence
7799 of such a note. */
7800 for (insn = epilogue_end; insn; insn = next)
7802 next = NEXT_INSN (insn);
7803 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7804 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7807 #endif
7810 /* Reposition the prologue-end and epilogue-begin notes after instruction
7811 scheduling and delayed branch scheduling. */
7813 void
7814 reposition_prologue_and_epilogue_notes (f)
7815 rtx f ATTRIBUTE_UNUSED;
7817 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7818 rtx insn, last, note;
7819 int len;
7821 if ((len = VARRAY_SIZE (prologue)) > 0)
7823 last = 0, note = 0;
7825 /* Scan from the beginning until we reach the last prologue insn.
7826 We apparently can't depend on basic_block_{head,end} after
7827 reorg has run. */
7828 for (insn = f; insn; insn = NEXT_INSN (insn))
7830 if (GET_CODE (insn) == NOTE)
7832 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7833 note = insn;
7835 else if (contains (insn, prologue))
7837 last = insn;
7838 if (--len == 0)
7839 break;
7843 if (last)
7845 rtx next;
7847 /* Find the prologue-end note if we haven't already, and
7848 move it to just after the last prologue insn. */
7849 if (note == 0)
7851 for (note = last; (note = NEXT_INSN (note));)
7852 if (GET_CODE (note) == NOTE
7853 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7854 break;
7857 next = NEXT_INSN (note);
7859 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
7860 if (GET_CODE (last) == CODE_LABEL)
7861 last = NEXT_INSN (last);
7862 reorder_insns (note, note, last);
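/* Schematically, scheduling or reorg may have left the note too early:

     NOTE_INSN_PROLOGUE_END
     (prologue insn)
     (prologue insn)      <- last

   and the reorder_insns call above restores

     (prologue insn)
     (prologue insn)
     NOTE_INSN_PROLOGUE_END  */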
7866 if ((len = VARRAY_SIZE (epilogue)) > 0)
7868 last = 0, note = 0;
7870 /* Scan from the end until we reach the first epilogue insn.
7871 We apparently can't depend on basic_block_{head,end} after
7872 reorg has run. */
7873 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7875 if (GET_CODE (insn) == NOTE)
7877 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7878 note = insn;
7880 else if (contains (insn, epilogue))
7882 last = insn;
7883 if (--len == 0)
7884 break;
7888 if (last)
7890 /* Find the epilogue-begin note if we haven't already, and
7891 move it to just before the first epilogue insn. */
7892 if (note == 0)
7894 for (note = insn; (note = PREV_INSN (note));)
7895 if (GET_CODE (note) == NOTE
7896 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7897 break;
7900 if (PREV_INSN (last) != note)
7901 reorder_insns (note, note, PREV_INSN (last));
7904 #endif /* HAVE_prologue or HAVE_epilogue */
7907 /* Called once, at initialization, to initialize function.c. */
7909 void
7910 init_function_once ()
7912 VARRAY_INT_INIT (prologue, 0, "prologue");
7913 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7914 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7917 #include "gt-function.h"