/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
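
/* An added worked example (illustrative, not part of the original source):
   because ALIGN is a power of two, the mask arithmetic rounds correctly
   even for negative two's-complement values, where `/' and `%' would be
   implementation-defined:

     FLOOR_ROUND (-13, 8) == -16     CEIL_ROUND (-13, 8) ==  -8
     FLOOR_ROUND ( 16, 8) ==  16     CEIL_ROUND ( 13, 8) ==  16  */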
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */

int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled issues a computed jump.  */

int current_function_has_computed_jump;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;
/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;
/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
int current_function_instrument_entry_exit;

/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* List (chain of EXPR_LIST) of stack slots that hold the current handlers
   for nonlocal gotos.  There is one for every nonlocal label in the function;
   this list matches the one in nonlocal_labels.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slots;

/* List (chain of EXPR_LIST) of labels heading the current handlers for
   nonlocal gotos.  */

rtx nonlocal_goto_handler_labels;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is
   requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;
/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
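
/* An added sketch of how these levels interact (illustrative only):
   expanding `f (g ())' at temp_slot_level N allocates the slot that
   receives g's aggregate return value at level N; free_temp_slots at
   the end of the statement releases it, while a slot holding a ({...})
   result is bumped to level N-1 by preserve_temp_slots so that it
   survives the pop of the inner level.  */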
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
					    int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
					      int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
				      enum machine_mode, enum machine_mode,
				      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
				    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
			     tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->has_computed_jump = current_function_has_computed_jump;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
  p->nonlocal_goto_handler_labels = nonlocal_goto_handler_labels;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}
void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
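
/* An added usage sketch (illustrative, not from the original source):
   language front ends bracket compilation of a nested function with
   these calls so the outer function's per-function globals survive:

     push_function_context ();
     ... expand the nested function's body ...
     pop_function_context ();
*/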
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_has_computed_jump = p->has_computed_jump;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
  nonlocal_goto_handler_labels = p->nonlocal_goto_handler_labels;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
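
/* An added usage sketch (illustrative, not part of the original source):
   a caller wanting a word-sized slot aligned only as its mode requires
   would write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   while passing ALIGN == -1 instead forces BIGGEST_ALIGNMENT and rounds
   SIZE up to a multiple of it, and a positive ALIGN requests that exact
   boundary in bits.  */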
/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
		 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
			function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& (!flag_strict_aliasing
	    || (alias_set && p->alias_set == alias_set))
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
	  /* We can't split slots if -fstrict-aliasing because the
	     information about the alias set for the new slot will be
	     lost.  */
	  && !flag_strict_aliasing)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }
  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < (BIGGEST_ALIGNMENT / BITS_PER_UNIT))
	abort ();
      p->slot = assign_stack_local (mode,
				    mode == BLKmode
				    ? CEIL_ROUND (size, align) : size,
				    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
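
/* An added usage sketch (illustrative, not from the original source):
   allocating scratch storage for a struct-valued expression, freed with
   the enclosing statement since KEEP is 0 and memory is required:

     rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   With MEMORY_REQUIRED == 0 and a scalar TYPE, a (possibly promoted)
   pseudo register is returned instead.  */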
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
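
/* An added worked example of the merge test above (illustrative only):
   a free BLKmode slot P with base_offset 16 and full_size 16 and a free
   BLKmode slot Q with base_offset 32 satisfy
   p->base_offset + p->full_size == q->base_offset, so Q is folded into
   P, leaving one reusable 32-byte slot at offset 16.  */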
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
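
/* An added example of when this matters (illustrative; make_s is a
   hypothetical function):

     int x = ({ struct S s = make_s (); s.a; });

   The statement expression's value can live in a temp slot; bumping the
   slot to the enclosing level keeps the free of the inner statement's
   temporaries from recycling it before X is initialized.  */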
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
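
/* An added pairing sketch (illustrative, not from the original source):
   callers bracket expansion of a nested construct so its temporaries
   are released as a group:

     push_temp_slots ();
     ... expand the subexpression, allocating temporaries ...
     pop_temp_slots ();

   pop_temp_slots frees every in-use slot made at the inner level and
   then lets combine_temp_slots merge adjacent free BLKmode slots.  */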
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);
  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl)
			    || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), ptr_mode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
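
/* An added note on the CONCAT case above (illustrative): a variable such
   as `__complex__ double c' can be expanded as a CONCAT of two DFmode
   pseudos for the real and imaginary parts, which is why both halves
   must be spilled to consecutive stack words here.  */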
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
	new = function->parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (regno < max_parm_reg)
	new = parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
		       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
	 on saveable obstack.  */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses, 0);
  end_sequence ();
}
1748 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries
1749 and X is some part of an insn. Return a struct fixup_replacement whose OLD
1750 value is equal to X. Allocate a new structure if no such entry exists. */
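/* The typical calling pattern, as used later in this file, allocates
   the replacement pseudo lazily:

       struct fixup_replacement *r
         = find_fixup_replacement (&replacements, x);
       if (r->new == 0)
         r->new = gen_reg_rtx (GET_MODE (x));

   so every occurrence of X in the insn maps to one and the same rtx.  */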
1752 static struct fixup_replacement *
1753 find_fixup_replacement (replacements, x)
1754 struct fixup_replacement **replacements;
1755 rtx x;
1757 struct fixup_replacement *p;
1759 /* See if we have already replaced this. */
1760 for (p = *replacements; p && p->old != x; p = p->next)
1763 if (p == 0)
1765 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1766 p->old = x;
1767 p->new = 0;
1768 p->next = *replacements;
1769 *replacements = p;
1772 return p;
1775 /* Scan the insn-chain starting with INSN for refs to VAR
1776 and fix them up. TOPLEVEL is nonzero if this chain is the
1777 main chain of insns for the current function. */
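/* For instance, once VAR has become a stack MEM, a leftover copy of VAR
   to itself, (set VAR VAR), is useless; such no-op loads and stores, and
   CLOBBERs of VAR, are turned into NOTE_INSN_DELETED notes below.  */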
1779 static void
1780 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1781 rtx var;
1782 enum machine_mode promoted_mode;
1783 int unsignedp;
1784 rtx insn;
1785 int toplevel;
1787 rtx call_dest = 0;
1789 while (insn)
1791 rtx next = NEXT_INSN (insn);
1792 rtx set, prev, prev_set;
1793 rtx note;
1795 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1797 /* If this is a CLOBBER of VAR, delete it.
1799 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1800 and REG_RETVAL notes too. */
1801 if (GET_CODE (PATTERN (insn)) == CLOBBER
1802 && (XEXP (PATTERN (insn), 0) == var
1803 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1804 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1805 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1807 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1808 /* The REG_LIBCALL note will go away since we are going to
1809 turn INSN into a NOTE, so just delete the
1810 corresponding REG_RETVAL note. */
1811 remove_note (XEXP (note, 0),
1812 find_reg_note (XEXP (note, 0), REG_RETVAL,
1813 NULL_RTX));
1815 /* In unoptimized compilation, we shouldn't call delete_insn
1816 except in jump.c when emitting warnings. */
1817 PUT_CODE (insn, NOTE);
1818 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1819 NOTE_SOURCE_FILE (insn) = 0;
1822 /* The insn to load VAR from a home in the arglist
1823 is now a no-op. When we see it, just delete it.
1824 Similarly if this is storing VAR from a register from which
1825 it was loaded in the previous insn. This will occur
1826 when an ADDRESSOF was made for an arglist slot. */
1827 else if (toplevel
1828 && (set = single_set (insn)) != 0
1829 && SET_DEST (set) == var
1830 /* If this represents the result of an insn group,
1831 don't delete the insn. */
1832 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1833 && (rtx_equal_p (SET_SRC (set), var)
1834 || (GET_CODE (SET_SRC (set)) == REG
1835 && (prev = prev_nonnote_insn (insn)) != 0
1836 && (prev_set = single_set (prev)) != 0
1837 && SET_DEST (prev_set) == SET_SRC (set)
1838 && rtx_equal_p (SET_SRC (prev_set), var))))
1840 /* In unoptimized compilation, we shouldn't call delete_insn
1841 except in jump.c when emitting warnings. */
1842 PUT_CODE (insn, NOTE);
1843 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1844 NOTE_SOURCE_FILE (insn) = 0;
1845 if (insn == last_parm_insn)
1846 last_parm_insn = PREV_INSN (next);
1848 else
1850 struct fixup_replacement *replacements = 0;
1851 rtx next_insn = NEXT_INSN (insn);
1853 if (SMALL_REGISTER_CLASSES)
1855 /* If the insn that copies the results of a CALL_INSN
1856 into a pseudo now references VAR, we have to use an
1857 intermediate pseudo since we want the life of the
1858 return value register to be only a single insn.
1860 If we don't use an intermediate pseudo, such things as the
1861 address computations needed to make the address of VAR valid
1862 can be placed between the CALL_INSN and INSN.
1864 To make sure this doesn't happen, we record the destination
1865 of the CALL_INSN and see if the next insn uses both that
1866 and VAR. */
1868 if (call_dest != 0 && GET_CODE (insn) == INSN
1869 && reg_mentioned_p (var, PATTERN (insn))
1870 && reg_mentioned_p (call_dest, PATTERN (insn)))
1872 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1874 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1876 PATTERN (insn) = replace_rtx (PATTERN (insn),
1877 call_dest, temp);
1880 if (GET_CODE (insn) == CALL_INSN
1881 && GET_CODE (PATTERN (insn)) == SET)
1882 call_dest = SET_DEST (PATTERN (insn));
1883 else if (GET_CODE (insn) == CALL_INSN
1884 && GET_CODE (PATTERN (insn)) == PARALLEL
1885 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1886 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1887 else
1888 call_dest = 0;
1891 /* See if we have to do anything to INSN now that VAR is in
1892 memory. If it needs to be loaded into a pseudo, use a single
1893 pseudo for the entire insn in case there is a MATCH_DUP
1894 between two operands. We pass a pointer to the head of
1895 a list of struct fixup_replacements. If fixup_var_refs_1
1896 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1897 it will record them in this list.
1899 If it allocated a pseudo for any replacement, we copy into
1900 it here. */
1902 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1903 &replacements);
1905 /* If this is last_parm_insn, and any instructions were output
1906 after it to fix it up, then we must set last_parm_insn to
1907 the last such instruction emitted. */
1908 if (insn == last_parm_insn)
1909 last_parm_insn = PREV_INSN (next_insn);
1911 while (replacements)
1913 if (GET_CODE (replacements->new) == REG)
1915 rtx insert_before;
1916 rtx seq;
1918 /* OLD might be a (subreg (mem)). */
1919 if (GET_CODE (replacements->old) == SUBREG)
1920 replacements->old
1921 = fixup_memory_subreg (replacements->old, insn, 0);
1922 else
1923 replacements->old
1924 = fixup_stack_1 (replacements->old, insn);
1926 insert_before = insn;
1928 /* If we are changing the mode, do a conversion.
1929 This might be wasteful, but combine.c will
1930 eliminate much of the waste. */
1932 if (GET_MODE (replacements->new)
1933 != GET_MODE (replacements->old))
1935 start_sequence ();
1936 convert_move (replacements->new,
1937 replacements->old, unsignedp);
1938 seq = gen_sequence ();
1939 end_sequence ();
1941 else
1942 seq = gen_move_insn (replacements->new,
1943 replacements->old);
1945 emit_insn_before (seq, insert_before);
1948 replacements = replacements->next;
1952 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1953 But don't touch other insns referred to by reg-notes;
1954 we will get them elsewhere. */
1955 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1956 if (GET_CODE (note) != INSN_LIST)
1957 XEXP (note, 0)
1958 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1960 insn = next;
1964 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1965 See if the rtx expression at *LOC in INSN needs to be changed.
1967 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1968 contain a list of original rtx's and replacements. If we find that we need
1969 to modify this insn by replacing a memory reference with a pseudo or by
1970 making a new MEM to implement a SUBREG, we consult that list to see if
1971 we have already chosen a replacement. If none has already been allocated,
1972 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1973 or the SUBREG, as appropriate, to the pseudo. */
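/* For example, if VAR appears twice in one insn,

       (set (reg:SI 110) (plus:SI VAR VAR))

   both occurrences must be rewritten to the same pseudo, say
   (reg:SI 111), loaded from VAR once before the insn; using two
   different pseudos could break a pattern containing a MATCH_DUP.
   (A sketch; the register numbers are arbitrary.)  */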
1975 static void
1976 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1977 register rtx var;
1978 enum machine_mode promoted_mode;
1979 register rtx *loc;
1980 rtx insn;
1981 struct fixup_replacement **replacements;
1983 register int i;
1984 register rtx x = *loc;
1985 RTX_CODE code = GET_CODE (x);
1986 register char *fmt;
1987 register rtx tem, tem1;
1988 struct fixup_replacement *replacement;
1990 switch (code)
1992 case ADDRESSOF:
1993 if (XEXP (x, 0) == var)
1995 /* Prevent sharing of rtl that might lose. */
1996 rtx sub = copy_rtx (XEXP (var, 0));
1998 start_sequence ();
2000 if (! validate_change (insn, loc, sub, 0))
2002 rtx y = force_operand (sub, NULL_RTX);
2004 if (! validate_change (insn, loc, y, 0))
2005 *loc = copy_to_reg (y);
2008 emit_insn_before (gen_sequence (), insn);
2009 end_sequence ();
2011 return;
2013 case MEM:
2014 if (var == x)
2016 /* If we already have a replacement, use it. Otherwise,
2017 try to fix up this address in case it is invalid. */
2019 replacement = find_fixup_replacement (replacements, var);
2020 if (replacement->new)
2022 *loc = replacement->new;
2023 return;
2026 *loc = replacement->new = x = fixup_stack_1 (x, insn);
2028 /* Unless we are forcing memory to register or we changed the mode,
2029 we can leave things the way they are if the insn is valid. */
2031 INSN_CODE (insn) = -1;
2032 if (! flag_force_mem && GET_MODE (x) == promoted_mode
2033 && recog_memoized (insn) >= 0)
2034 return;
2036 *loc = replacement->new = gen_reg_rtx (promoted_mode);
2037 return;
2040 /* If X contains VAR, we need to unshare it here so that we update
2041 each occurrence separately. But all identical MEMs in one insn
2042 must be replaced with the same rtx because of the possibility of
2043 MATCH_DUPs. */
2045 if (reg_mentioned_p (var, x))
2047 replacement = find_fixup_replacement (replacements, x);
2048 if (replacement->new == 0)
2049 replacement->new = copy_most_rtx (x, var);
2051 *loc = x = replacement->new;
2053 break;
2055 case REG:
2056 case CC0:
2057 case PC:
2058 case CONST_INT:
2059 case CONST:
2060 case SYMBOL_REF:
2061 case LABEL_REF:
2062 case CONST_DOUBLE:
2063 return;
2065 case SIGN_EXTRACT:
2066 case ZERO_EXTRACT:
2067 /* Note that in some cases those types of expressions are altered
2068 by optimize_bit_field, and do not survive to get here. */
2069 if (XEXP (x, 0) == var
2070 || (GET_CODE (XEXP (x, 0)) == SUBREG
2071 && SUBREG_REG (XEXP (x, 0)) == var))
2073 /* Get TEM as a valid MEM in the mode presently in the insn.
2075 We don't worry about the possibility of MATCH_DUP here; it
2076 is highly unlikely and would be tricky to handle. */
2078 tem = XEXP (x, 0);
2079 if (GET_CODE (tem) == SUBREG)
2081 if (GET_MODE_BITSIZE (GET_MODE (tem))
2082 > GET_MODE_BITSIZE (GET_MODE (var)))
2084 replacement = find_fixup_replacement (replacements, var);
2085 if (replacement->new == 0)
2086 replacement->new = gen_reg_rtx (GET_MODE (var));
2087 SUBREG_REG (tem) = replacement->new;
2089 else
2090 tem = fixup_memory_subreg (tem, insn, 0);
2092 else
2093 tem = fixup_stack_1 (tem, insn);
2095 /* Unless we want to load from memory, get TEM into the proper mode
2096 for an extract from memory. This can only be done if the
2097 extract is at a constant position and length. */
2099 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2100 && GET_CODE (XEXP (x, 2)) == CONST_INT
2101 && ! mode_dependent_address_p (XEXP (tem, 0))
2102 && ! MEM_VOLATILE_P (tem))
2104 enum machine_mode wanted_mode = VOIDmode;
2105 enum machine_mode is_mode = GET_MODE (tem);
2106 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2108 #ifdef HAVE_extzv
2109 if (GET_CODE (x) == ZERO_EXTRACT)
2111 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
2112 if (wanted_mode == VOIDmode)
2113 wanted_mode = word_mode;
2115 #endif
2116 #ifdef HAVE_extv
2117 if (GET_CODE (x) == SIGN_EXTRACT)
2119 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
2120 if (wanted_mode == VOIDmode)
2121 wanted_mode = word_mode;
2123 #endif
2124 /* If we have a narrower mode, we can do something. */
2125 if (wanted_mode != VOIDmode
2126 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2128 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2129 rtx old_pos = XEXP (x, 2);
2130 rtx newmem;
2132 /* If the bytes and bits are counted differently, we
2133 must adjust the offset. */
2134 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2135 offset = (GET_MODE_SIZE (is_mode)
2136 - GET_MODE_SIZE (wanted_mode) - offset);
2138 pos %= GET_MODE_BITSIZE (wanted_mode);
2140 newmem = gen_rtx_MEM (wanted_mode,
2141 plus_constant (XEXP (tem, 0), offset));
2142 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2143 MEM_COPY_ATTRIBUTES (newmem, tem);
2145 /* Make the change and see if the insn remains valid. */
2146 INSN_CODE (insn) = -1;
2147 XEXP (x, 0) = newmem;
2148 XEXP (x, 2) = GEN_INT (pos);
2150 if (recog_memoized (insn) >= 0)
2151 return;
2153 /* Otherwise, restore old position. XEXP (x, 0) will be
2154 restored later. */
2155 XEXP (x, 2) = old_pos;
2159 /* If we get here, the bitfield extract insn can't accept a memory
2160 reference. Copy the input into a register. */
2162 tem1 = gen_reg_rtx (GET_MODE (tem));
2163 emit_insn_before (gen_move_insn (tem1, tem), insn);
2164 XEXP (x, 0) = tem1;
2165 return;
2167 break;
2169 case SUBREG:
2170 if (SUBREG_REG (x) == var)
2172 /* If this is a special SUBREG made because VAR was promoted
2173 from a wider mode, replace it with VAR and call ourselves
2174 recursively, this time saying that the object previously
2175 had its current mode (by virtue of the SUBREG). */
2177 if (SUBREG_PROMOTED_VAR_P (x))
2179 *loc = var;
2180 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2181 return;
2184 /* If this SUBREG makes VAR wider, it has become a paradoxical
2185 SUBREG with VAR in memory, but these aren't allowed at this
2186 stage of the compilation. So load VAR into a pseudo and take
2187 a SUBREG of that pseudo. */
2188 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2190 replacement = find_fixup_replacement (replacements, var);
2191 if (replacement->new == 0)
2192 replacement->new = gen_reg_rtx (GET_MODE (var));
2193 SUBREG_REG (x) = replacement->new;
2194 return;
2197 /* See if we have already found a replacement for this SUBREG.
2198 If so, use it. Otherwise, make a MEM and see if the insn
2199 is recognized. If not, or if we should force MEM into a register,
2200 make a pseudo for this SUBREG. */
2201 replacement = find_fixup_replacement (replacements, x);
2202 if (replacement->new)
2204 *loc = replacement->new;
2205 return;
2208 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2210 INSN_CODE (insn) = -1;
2211 if (! flag_force_mem && recog_memoized (insn) >= 0)
2212 return;
2214 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2215 return;
2217 break;
2219 case SET:
2220 /* First do special simplification of bit-field references. */
2221 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2222 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2223 optimize_bit_field (x, insn, 0);
2224 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2225 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2226 optimize_bit_field (x, insn, NULL_PTR);
2228 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2229 into a register and then store it back out. */
2230 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2231 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2232 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2233 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2234 > GET_MODE_SIZE (GET_MODE (var))))
2236 replacement = find_fixup_replacement (replacements, var);
2237 if (replacement->new == 0)
2238 replacement->new = gen_reg_rtx (GET_MODE (var));
2240 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2241 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2244 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2245 insn into a pseudo and store the low part of the pseudo into VAR. */
2246 if (GET_CODE (SET_DEST (x)) == SUBREG
2247 && SUBREG_REG (SET_DEST (x)) == var
2248 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2249 > GET_MODE_SIZE (GET_MODE (var))))
2251 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2252 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2253 tem)),
2254 insn);
2255 break;
2259 rtx dest = SET_DEST (x);
2260 rtx src = SET_SRC (x);
2261 #ifdef HAVE_insv
2262 rtx outerdest = dest;
2263 #endif
2265 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2266 || GET_CODE (dest) == SIGN_EXTRACT
2267 || GET_CODE (dest) == ZERO_EXTRACT)
2268 dest = XEXP (dest, 0);
2270 if (GET_CODE (src) == SUBREG)
2271 src = XEXP (src, 0);
2273 /* If VAR does not appear at the top level of the SET
2274 just scan the lower levels of the tree. */
2276 if (src != var && dest != var)
2277 break;
2279 /* We will need to rerecognize this insn. */
2280 INSN_CODE (insn) = -1;
2282 #ifdef HAVE_insv
2283 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2285 /* Since this case will return, ensure we fixup all the
2286 operands here. */
2287 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2288 insn, replacements);
2289 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2290 insn, replacements);
2291 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2292 insn, replacements);
2294 tem = XEXP (outerdest, 0);
2296 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2297 that may appear inside a ZERO_EXTRACT.
2298 This was legitimate when the MEM was a REG. */
2299 if (GET_CODE (tem) == SUBREG
2300 && SUBREG_REG (tem) == var)
2301 tem = fixup_memory_subreg (tem, insn, 0);
2302 else
2303 tem = fixup_stack_1 (tem, insn);
2305 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2306 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2307 && ! mode_dependent_address_p (XEXP (tem, 0))
2308 && ! MEM_VOLATILE_P (tem))
2310 enum machine_mode wanted_mode;
2311 enum machine_mode is_mode = GET_MODE (tem);
2312 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2314 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2315 if (wanted_mode == VOIDmode)
2316 wanted_mode = word_mode;
2318 /* If we have a narrower mode, we can do something. */
2319 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2321 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2322 rtx old_pos = XEXP (outerdest, 2);
2323 rtx newmem;
2325 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2326 offset = (GET_MODE_SIZE (is_mode)
2327 - GET_MODE_SIZE (wanted_mode) - offset);
2329 pos %= GET_MODE_BITSIZE (wanted_mode);
2331 newmem = gen_rtx_MEM (wanted_mode,
2332 plus_constant (XEXP (tem, 0), offset));
2333 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2334 MEM_COPY_ATTRIBUTES (newmem, tem);
2336 /* Make the change and see if the insn remains valid. */
2337 INSN_CODE (insn) = -1;
2338 XEXP (outerdest, 0) = newmem;
2339 XEXP (outerdest, 2) = GEN_INT (pos);
2341 if (recog_memoized (insn) >= 0)
2342 return;
2344 /* Otherwise, restore old position. XEXP (x, 0) will be
2345 restored later. */
2346 XEXP (outerdest, 2) = old_pos;
2350 /* If we get here, the bit-field store doesn't allow memory
2351 or isn't located at a constant position. Load the value into
2352 a register, do the store, and put it back into memory. */
2354 tem1 = gen_reg_rtx (GET_MODE (tem));
2355 emit_insn_before (gen_move_insn (tem1, tem), insn);
2356 emit_insn_after (gen_move_insn (tem, tem1), insn);
2357 XEXP (outerdest, 0) = tem1;
2358 return;
2360 #endif
2362 /* STRICT_LOW_PART is a no-op on memory references
2363 and it can cause combinations to be unrecognizable,
2364 so eliminate it. */
2366 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2367 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2369 /* A valid insn to copy VAR into or out of a register
2370 must be left alone, to avoid an infinite loop here.
2371 If the reference to VAR is by a subreg, fix that up,
2372 since SUBREG is not valid for a memref.
2373 Also fix up the address of the stack slot.
2375 Note that we must not try to recognize the insn until
2376 after we know that we have valid addresses and no
2377 (subreg (mem ...) ...) constructs, since these interfere
2378 with determining the validity of the insn. */
2380 if ((SET_SRC (x) == var
2381 || (GET_CODE (SET_SRC (x)) == SUBREG
2382 && SUBREG_REG (SET_SRC (x)) == var))
2383 && (GET_CODE (SET_DEST (x)) == REG
2384 || (GET_CODE (SET_DEST (x)) == SUBREG
2385 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2386 && GET_MODE (var) == promoted_mode
2387 && x == single_set (insn))
2389 rtx pat;
2391 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2392 if (replacement->new)
2393 SET_SRC (x) = replacement->new;
2394 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2395 SET_SRC (x) = replacement->new
2396 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2397 else
2398 SET_SRC (x) = replacement->new
2399 = fixup_stack_1 (SET_SRC (x), insn);
2401 if (recog_memoized (insn) >= 0)
2402 return;
2404 /* INSN is not valid, but we know that we want to
2405 copy SET_SRC (x) to SET_DEST (x) in some way. So
2406 we generate the move and see whether it requires more
2407 than one insn. If it does, we emit those insns and
2408 delete INSN. Otherwise, we can just replace the pattern
2409 of INSN; we have already verified above that INSN has
2410 no other function than to do X. */
2412 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2413 if (GET_CODE (pat) == SEQUENCE)
2415 emit_insn_after (pat, insn);
2416 PUT_CODE (insn, NOTE);
2417 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2418 NOTE_SOURCE_FILE (insn) = 0;
2420 else
2421 PATTERN (insn) = pat;
2423 return;
2426 if ((SET_DEST (x) == var
2427 || (GET_CODE (SET_DEST (x)) == SUBREG
2428 && SUBREG_REG (SET_DEST (x)) == var))
2429 && (GET_CODE (SET_SRC (x)) == REG
2430 || (GET_CODE (SET_SRC (x)) == SUBREG
2431 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2432 && GET_MODE (var) == promoted_mode
2433 && x == single_set (insn))
2435 rtx pat;
2437 if (GET_CODE (SET_DEST (x)) == SUBREG)
2438 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2439 else
2440 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2442 if (recog_memoized (insn) >= 0)
2443 return;
2445 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2446 if (GET_CODE (pat) == SEQUENCE)
2448 emit_insn_after (pat, insn);
2449 PUT_CODE (insn, NOTE);
2450 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2451 NOTE_SOURCE_FILE (insn) = 0;
2453 else
2454 PATTERN (insn) = pat;
2456 return;
2459 /* Otherwise, storing into VAR must be handled specially
2460 by storing into a temporary and copying that into VAR
2461 with a new insn after this one. Note that this case
2462 will be used when storing into a promoted scalar since
2463 the insn will now have different modes on the input
2464 and output and hence will be invalid (except for the case
2465 of setting it to a constant, which does not need any
2466 change if it is valid). We generate extra code in that case,
2467 but combine.c will eliminate it. */
2469 if (dest == var)
2471 rtx temp;
2472 rtx fixeddest = SET_DEST (x);
2474 /* STRICT_LOW_PART can be discarded around a MEM. */
2475 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2476 fixeddest = XEXP (fixeddest, 0);
2477 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2478 if (GET_CODE (fixeddest) == SUBREG)
2480 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2481 promoted_mode = GET_MODE (fixeddest);
2483 else
2484 fixeddest = fixup_stack_1 (fixeddest, insn);
2486 temp = gen_reg_rtx (promoted_mode);
2488 emit_insn_after (gen_move_insn (fixeddest,
2489 gen_lowpart (GET_MODE (fixeddest),
2490 temp)),
2491 insn);
2493 SET_DEST (x) = temp;
2497 default:
2498 break;
2501 /* Nothing special about this RTX; fix its operands. */
2503 fmt = GET_RTX_FORMAT (code);
2504 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2506 if (fmt[i] == 'e')
2507 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2508 if (fmt[i] == 'E')
2510 register int j;
2511 for (j = 0; j < XVECLEN (x, i); j++)
2512 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2513 insn, replacements);
2518 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2519 return an rtx (MEM:m1 newaddr) which is equivalent.
2520 If any insns must be emitted to compute NEWADDR, put them before INSN.
2522 UNCRITICAL nonzero means accept paradoxical subregs.
2523 This is used for subregs found inside REG_NOTES. */
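/* For example, on a 32-bit little-endian target (a sketch; the offset
   depends on UNITS_PER_WORD and endianness),

       (subreg:SI (mem:DI (reg:SI 100)) 1)

   becomes

       (mem:SI (plus:SI (reg:SI 100) (const_int 4)))

   with any insns needed to compute the new address emitted before
   INSN.  */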
2525 static rtx
2526 fixup_memory_subreg (x, insn, uncritical)
2527 rtx x;
2528 rtx insn;
2529 int uncritical;
2531 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2532 rtx addr = XEXP (SUBREG_REG (x), 0);
2533 enum machine_mode mode = GET_MODE (x);
2534 rtx result;
2536 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2537 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2538 && ! uncritical)
2539 abort ();
2541 if (BYTES_BIG_ENDIAN)
2542 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2543 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2544 addr = plus_constant (addr, offset);
2545 if (!flag_force_addr && memory_address_p (mode, addr))
2546 /* Shortcut if no insns need be emitted. */
2547 return change_address (SUBREG_REG (x), mode, addr);
2548 start_sequence ();
2549 result = change_address (SUBREG_REG (x), mode, addr);
2550 emit_insn_before (gen_sequence (), insn);
2551 end_sequence ();
2552 return result;
2555 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2556 Replace subexpressions of X in place.
2557 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2558 Otherwise return X, with its contents possibly altered.
2560 If any insns must be emitted to compute NEWADDR, put them before INSN.
2562 UNCRITICAL is as in fixup_memory_subreg. */
2564 static rtx
2565 walk_fixup_memory_subreg (x, insn, uncritical)
2566 register rtx x;
2567 rtx insn;
2568 int uncritical;
2570 register enum rtx_code code;
2571 register char *fmt;
2572 register int i;
2574 if (x == 0)
2575 return 0;
2577 code = GET_CODE (x);
2579 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2580 return fixup_memory_subreg (x, insn, uncritical);
2582 /* Nothing special about this RTX; fix its operands. */
2584 fmt = GET_RTX_FORMAT (code);
2585 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2587 if (fmt[i] == 'e')
2588 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2589 if (fmt[i] == 'E')
2591 register int j;
2592 for (j = 0; j < XVECLEN (x, i); j++)
2593 XVECEXP (x, i, j)
2594 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2597 return x;
2600 /* For each memory ref within X, if it refers to a stack slot
2601 with an out-of-range displacement, put the address in a temp register
2602 (emitting new insns before INSN to load these registers)
2603 and alter the memory ref to use that register.
2604 Replace each such MEM rtx with a copy, to avoid clobberage. */
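/* For example, if the constant displacement here is too large for the
   machine's addressing modes (a sketch),

       (mem:SI (plus:SI (reg:SI frame-pointer) (const_int 100000)))

   becomes

       (mem:SI (reg:SI 105))

   where insns loading pseudo 105 with the out-of-range sum are emitted
   before INSN.  */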
2606 static rtx
2607 fixup_stack_1 (x, insn)
2608 rtx x;
2609 rtx insn;
2611 register int i;
2612 register RTX_CODE code = GET_CODE (x);
2613 register char *fmt;
2615 if (code == MEM)
2617 register rtx ad = XEXP (x, 0);
2618 /* If we have address of a stack slot but it's not valid
2619 (displacement is too large), compute the sum in a register. */
2620 if (GET_CODE (ad) == PLUS
2621 && GET_CODE (XEXP (ad, 0)) == REG
2622 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2623 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2624 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2625 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2626 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2627 #endif
2628 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2629 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2630 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2631 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2633 rtx temp, seq;
2634 if (memory_address_p (GET_MODE (x), ad))
2635 return x;
2637 start_sequence ();
2638 temp = copy_to_reg (ad);
2639 seq = gen_sequence ();
2640 end_sequence ();
2641 emit_insn_before (seq, insn);
2642 return change_address (x, VOIDmode, temp);
2644 return x;
2647 fmt = GET_RTX_FORMAT (code);
2648 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2650 if (fmt[i] == 'e')
2651 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2652 if (fmt[i] == 'E')
2654 register int j;
2655 for (j = 0; j < XVECLEN (x, i); j++)
2656 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2659 return x;
2662 /* Optimization: a bit-field instruction whose field
2663 happens to be a byte or halfword in memory
2664 can be changed to a move instruction.
2666 We call here when INSN is an insn to examine or store into a bit-field.
2667 BODY is the SET-rtx to be altered.
2669 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2670 (Currently this is called only from function.c, and EQUIV_MEM
2671 is always 0.) */
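/* For example, an aligned byte insertion such as

       (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
            (reg:SI 107))

   can become, on a little-endian target (a sketch; the byte offset
   differs with endianness),

       (set (mem:QI (plus:SI addr (const_int 1)))
            (subreg:QI (reg:SI 107) 0))  */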
2673 static void
2674 optimize_bit_field (body, insn, equiv_mem)
2675 rtx body;
2676 rtx insn;
2677 rtx *equiv_mem;
2679 register rtx bitfield;
2680 int destflag;
2681 rtx seq = 0;
2682 enum machine_mode mode;
2684 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2685 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2686 bitfield = SET_DEST (body), destflag = 1;
2687 else
2688 bitfield = SET_SRC (body), destflag = 0;
2690 /* First check that the field being stored has constant size and position
2691 and is in fact a byte or halfword suitably aligned. */
2693 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2694 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2695 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2696 != BLKmode)
2697 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2699 register rtx memref = 0;
2701 /* Now check that the containing word is memory, not a register,
2702 and that it is safe to change the machine mode. */
2704 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2705 memref = XEXP (bitfield, 0);
2706 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2707 && equiv_mem != 0)
2708 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2709 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2710 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2711 memref = SUBREG_REG (XEXP (bitfield, 0));
2712 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2713 && equiv_mem != 0
2714 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2715 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2717 if (memref
2718 && ! mode_dependent_address_p (XEXP (memref, 0))
2719 && ! MEM_VOLATILE_P (memref))
2721 /* Now adjust the address, first for any subreg'ing
2722 that we are now getting rid of,
2723 and then for which byte of the word is wanted. */
2725 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2726 rtx insns;
2728 /* Adjust OFFSET to count bits from low-address byte. */
2729 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2730 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2731 - offset - INTVAL (XEXP (bitfield, 1)));
2733 /* Adjust OFFSET to count bytes from low-address byte. */
2734 offset /= BITS_PER_UNIT;
2735 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2737 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2738 if (BYTES_BIG_ENDIAN)
2739 offset -= (MIN (UNITS_PER_WORD,
2740 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2741 - MIN (UNITS_PER_WORD,
2742 GET_MODE_SIZE (GET_MODE (memref))));
2745 start_sequence ();
2746 memref = change_address (memref, mode,
2747 plus_constant (XEXP (memref, 0), offset));
2748 insns = get_insns ();
2749 end_sequence ();
2750 emit_insns_before (insns, insn);
2752 /* Store this memory reference where
2753 we found the bit field reference. */
2755 if (destflag)
2757 validate_change (insn, &SET_DEST (body), memref, 1);
2758 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2760 rtx src = SET_SRC (body);
2761 while (GET_CODE (src) == SUBREG
2762 && SUBREG_WORD (src) == 0)
2763 src = SUBREG_REG (src);
2764 if (GET_MODE (src) != GET_MODE (memref))
2765 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2766 validate_change (insn, &SET_SRC (body), src, 1);
2768 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2769 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2770 /* This shouldn't happen because anything that didn't have
2771 one of these modes should have been converted explicitly
2772 and then referenced through a subreg.
2773 This is so because the original bit-field was
2774 handled by agg_mode and so its tree structure had
2775 the same mode that memref now has. */
2776 abort ();
2778 else
2780 rtx dest = SET_DEST (body);
2782 while (GET_CODE (dest) == SUBREG
2783 && SUBREG_WORD (dest) == 0
2784 && (GET_MODE_CLASS (GET_MODE (dest))
2785 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2786 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2787 <= UNITS_PER_WORD))
2788 dest = SUBREG_REG (dest);
2790 validate_change (insn, &SET_DEST (body), dest, 1);
2792 if (GET_MODE (dest) == GET_MODE (memref))
2793 validate_change (insn, &SET_SRC (body), memref, 1);
2794 else
2796 /* Convert the mem ref to the destination mode. */
2797 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2799 start_sequence ();
2800 convert_move (newreg, memref,
2801 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2802 seq = get_insns ();
2803 end_sequence ();
2805 validate_change (insn, &SET_SRC (body), newreg, 1);
2809 /* See if we can convert this extraction or insertion into
2810 a simple move insn. We might not be able to do so if this
2811 was, for example, part of a PARALLEL.
2813 If we succeed, write out any needed conversions. If we fail,
2814 it is hard to guess why we failed, so don't do anything
2815 special; just let the optimization be suppressed. */
2817 if (apply_change_group () && seq)
2818 emit_insns_before (seq, insn);
2823 /* These routines are responsible for converting virtual register references
2824 to the actual hard register references once RTL generation is complete.
2826 The following five variables are used for communication between the
2827 routines. They contain the offsets of the virtual registers from their
2828 respective hard registers. */
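/* For example (a sketch), with var_offset = STARTING_FRAME_OFFSET,

       (plus:SI (reg:SI virtual-stack-vars) (const_int 4))

   is rewritten by these routines as

       (plus:SI (reg:SI frame-pointer) (const_int [4 + var_offset]))

   and the other virtual registers map to their hard registers
   analogously.  */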
2830 static int in_arg_offset;
2831 static int var_offset;
2832 static int dynamic_offset;
2833 static int out_arg_offset;
2834 static int cfa_offset;
2836 /* On most machines, the stack pointer register is equivalent to the bottom
2837 of the stack. */
2839 #ifndef STACK_POINTER_OFFSET
2840 #define STACK_POINTER_OFFSET 0
2841 #endif
2843 /* If not defined, pick an appropriate default for the offset of dynamically
2844 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2845 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2847 #ifndef STACK_DYNAMIC_OFFSET
2849 #ifdef ACCUMULATE_OUTGOING_ARGS
2850 /* The bottom of the stack points to the actual arguments. If
2851 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2852 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2853 stack space for register parameters is not pushed by the caller, but is
2854 rather part of the fixed stack areas and hence not included in
2855 `current_function_outgoing_args_size'. Nevertheless, we must allow
2856 for it when allocating stack dynamic objects. */
2858 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2859 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2860 (current_function_outgoing_args_size \
2861 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2863 #else
2864 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2865 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2866 #endif
2868 #else
2869 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2870 #endif
2871 #endif
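/* As a hypothetical instance: with 16 bytes of outgoing arguments,
   REG_PARM_STACK_SPACE of 8 and a zero STACK_POINTER_OFFSET, dynamic
   allocations would then start 24 bytes above the stack pointer.  */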
2873 /* On a few machines, the CFA coincides with the arg pointer. */
2875 #ifndef ARG_POINTER_CFA_OFFSET
2876 #define ARG_POINTER_CFA_OFFSET 0
2877 #endif
2880 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2881 its address taken. DECL is the decl for the object stored in the
2882 register, for later use if we do need to force REG into the stack.
2883 REG is overwritten by the MEM, as in put_reg_into_stack. */
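/* For example (a sketch), for an `int' variable held in (reg:SI 55),
   taking its address turns the reg in place into

       (mem:SI (addressof:SI (reg:SI 106) 55))

   where 106 is a fresh pseudo and the 55 records the original REGNO for
   put_addressof_into_stack.  */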
rtx
2886 gen_mem_addressof (reg, decl)
2887 rtx reg;
2888 tree decl;
2890 tree type = TREE_TYPE (decl);
2891 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2892 SET_ADDRESSOF_DECL (r, decl);
2893 /* If the original REG was a user-variable, then so is the REG whose
2894 address is being taken. */
2895 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2897 XEXP (reg, 0) = r;
2898 PUT_CODE (reg, MEM);
2899 PUT_MODE (reg, DECL_MODE (decl));
2900 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2901 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2902 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2904 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2905 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2907 return reg;
2910 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2912 void
2913 flush_addressof (decl)
2914 tree decl;
2916 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2917 && DECL_RTL (decl) != 0
2918 && GET_CODE (DECL_RTL (decl)) == MEM
2919 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2920 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2921 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2924 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2926 static void
2927 put_addressof_into_stack (r)
2928 rtx r;
2930 tree decl = ADDRESSOF_DECL (r);
2931 rtx reg = XEXP (r, 0);
2933 if (GET_CODE (reg) != REG)
2934 abort ();
2936 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2937 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2938 ADDRESSOF_REGNO (r),
2939 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2942 /* List of replacements made below in purge_addressof_1 when creating
2943 bitfield insertions. */
2944 static rtx purge_addressof_replacements;
2946 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2947 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2948 the stack. */
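/* For instance, when the register never needs a real stack slot,

       (mem:SI (addressof:SI (reg:SI 106) 55))

   collapses back to plain (reg:SI 106) provided the modes agree;
   volatile or BLKmode references, and mode mismatches, are the cases
   that force the register onto the stack (a sketch of what follows).  */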
2950 static void
2951 purge_addressof_1 (loc, insn, force, store)
2952 rtx *loc;
2953 rtx insn;
2954 int force, store;
2956 rtx x;
2957 RTX_CODE code;
2958 int i, j;
2959 char *fmt;
2961 /* Re-start here to avoid recursion in common cases. */
2962 restart:
2964 x = *loc;
2965 if (x == 0)
2966 return;
2968 code = GET_CODE (x);
2970 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2972 rtx insns;
2973 /* We must create a copy of the rtx because it was created by
2974 overwriting a REG rtx which is always shared. */
2975 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2977 if (validate_change (insn, loc, sub, 0)
2978 || validate_replace_rtx (x, sub, insn))
2979 return;
2981 start_sequence ();
2982 sub = force_operand (sub, NULL_RTX);
2983 if (! validate_change (insn, loc, sub, 0)
2984 && ! validate_replace_rtx (x, sub, insn))
2985 abort ();
2987 insns = gen_sequence ();
2988 end_sequence ();
2989 emit_insn_before (insns, insn);
2990 return;
2992 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2994 rtx sub = XEXP (XEXP (x, 0), 0);
2995 rtx sub2;
2997 if (GET_CODE (sub) == MEM)
2999 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3000 MEM_COPY_ATTRIBUTES (sub2, sub);
3001 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
3002 sub = sub2;
3005 if (GET_CODE (sub) == REG
3006 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3008 put_addressof_into_stack (XEXP (x, 0));
3009 return;
3011 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3013 int size_x, size_sub;
3015 if (!insn)
3017 /* When processing REG_NOTES, look at the list of
3018 replacements done on the insn to find the register that X
3019 was replaced by. */
3020 rtx tem;
3022 for (tem = purge_addressof_replacements; tem != NULL_RTX;
3023 tem = XEXP (XEXP (tem, 1), 1))
3025 rtx y = XEXP (tem, 0);
3026 if (GET_CODE (y) == MEM
3027 && rtx_equal_p (XEXP (x, 0), XEXP (y, 0)))
3029 /* It can happen that the note may speak of things in
3030 a wider (or just different) mode than the code did.
3031 This is especially true of REG_RETVAL. */
3033 rtx z = XEXP (XEXP (tem, 1), 0);
3034 if (GET_MODE (x) != GET_MODE (y))
3036 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3037 z = SUBREG_REG (z);
3039 /* ??? If we'd gotten into any of the really complex
3040 cases below, I'm not sure we can do a proper
3041 replacement. Might we be able to delete the
3042 note in some cases? */
3043 if (GET_MODE_SIZE (GET_MODE (x))
3044 < GET_MODE_SIZE (GET_MODE (y)))
3045 abort ();
3047 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3048 && (GET_MODE_SIZE (GET_MODE (x))
3049 > GET_MODE_SIZE (GET_MODE (z))))
3051 /* This can occur as a result of invalid
3052 pointer casts, e.g. float f; ...
3053 *(long long int *)&f.
3054 ??? We could emit a warning here, but
3055 without a line number that wouldn't be
3056 very helpful. */
3057 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3059 else
3060 z = gen_lowpart (GET_MODE (x), z);
3063 *loc = z;
3064 return;
3068 /* There should always be such a replacement. */
3069 abort ();
3072 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3073 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3075 /* Don't even consider working with paradoxical subregs,
3076 or the moral equivalent seen here. */
3077 if (size_x <= size_sub
3078 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3080 /* Do a bitfield insertion to mirror what would happen
3081 in memory. */
3083 rtx val, seq;
3085 if (store)
3087 rtx p;
3089 start_sequence ();
3090 val = gen_reg_rtx (GET_MODE (x));
3091 if (! validate_change (insn, loc, val, 0))
3093 /* Discard the current sequence and put the
3094 ADDRESSOF on stack. */
3095 end_sequence ();
3096 goto give_up;
3098 seq = gen_sequence ();
3099 end_sequence ();
3100 emit_insn_before (seq, insn);
3102 start_sequence ();
3103 store_bit_field (sub, size_x, 0, GET_MODE (x),
3104 val, GET_MODE_SIZE (GET_MODE (sub)),
3105 GET_MODE_SIZE (GET_MODE (sub)));
3107 /* Make sure to unshare any shared rtl that store_bit_field
3108 might have created. */
3109 for (p = get_insns (); p; p = NEXT_INSN (p))
3111 reset_used_flags (PATTERN (p));
3112 reset_used_flags (REG_NOTES (p));
3113 reset_used_flags (LOG_LINKS (p));
3115 unshare_all_rtl (get_insns ());
3117 seq = gen_sequence ();
3118 end_sequence ();
3119 emit_insn_after (seq, insn);
3121 else
3123 start_sequence ();
3124 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3125 GET_MODE (x), GET_MODE (x),
3126 GET_MODE_SIZE (GET_MODE (sub)),
3127 GET_MODE_SIZE (GET_MODE (sub)));
3129 if (! validate_change (insn, loc, val, 0))
3131 /* Discard the current sequence and put the
3132 ADDRESSOF on stack. */
3133 end_sequence ();
3134 goto give_up;
3137 seq = gen_sequence ();
3138 end_sequence ();
3139 emit_insn_before (seq, insn);
3142 /* Remember the replacement so that the same one can be done
3143 on the REG_NOTES. */
3144 purge_addressof_replacements
3145 = gen_rtx_EXPR_LIST (VOIDmode, x,
3146 gen_rtx_EXPR_LIST (VOIDmode, val,
3147 purge_addressof_replacements));
3149 /* We replaced with a reg -- all done. */
3150 return;
3153 else if (validate_change (insn, loc, sub, 0))
3155 /* Remember the replacement so that the same one can be done
3156 on the REG_NOTES. */
3157 purge_addressof_replacements
3158 = gen_rtx_EXPR_LIST (VOIDmode, x,
3159 gen_rtx_EXPR_LIST (VOIDmode, sub,
3160 purge_addressof_replacements));
3161 goto restart;
3163 give_up:;
3164 /* else give up and put it into the stack */
3166 else if (code == ADDRESSOF)
3168 put_addressof_into_stack (x);
3169 return;
3171 else if (code == SET)
3173 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
3174 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
3175 return;
3178 /* Scan all subexpressions. */
3179 fmt = GET_RTX_FORMAT (code);
3180 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3182 if (*fmt == 'e')
3183 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
3184 else if (*fmt == 'E')
3185 for (j = 0; j < XVECLEN (x, i); j++)
3186 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
3190 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3191 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3192 stack. */
3194 void
3195 purge_addressof (insns)
3196 rtx insns;
3198 rtx insn;
3199 for (insn = insns; insn; insn = NEXT_INSN (insn))
3200 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3201 || GET_CODE (insn) == CALL_INSN)
3203 purge_addressof_1 (&PATTERN (insn), insn,
3204 asm_noperands (PATTERN (insn)) > 0, 0);
3205 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
3207 purge_addressof_replacements = 0;
3210 /* Pass through the INSNS of function FNDECL and convert virtual register
3211 references to hard register references. */
3213 void
3214 instantiate_virtual_regs (fndecl, insns)
3215 tree fndecl;
3216 rtx insns;
3218 rtx insn;
3219 int i;
3221 /* Compute the offsets to use for this function. */
3222 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3223 var_offset = STARTING_FRAME_OFFSET;
3224 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3225 out_arg_offset = STACK_POINTER_OFFSET;
3226 cfa_offset = ARG_POINTER_CFA_OFFSET;
3228 /* Scan all variables and parameters of this function. For each that is
3229 in memory, instantiate all virtual registers if the result is a valid
3230 address. If not, we do it later. That will handle most uses of virtual
3231 regs on many machines. */
3232 instantiate_decls (fndecl, 1);
3234 /* Initialize recognition, indicating that volatile is OK. */
3235 init_recog ();
3237 /* Scan through all the insns, instantiating every virtual register still
3238 present. */
3239 for (insn = insns; insn; insn = NEXT_INSN (insn))
3240 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3241 || GET_CODE (insn) == CALL_INSN)
3243 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3244 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3247 /* Instantiate the stack slots for the parm registers, for later use in
3248 addressof elimination. */
3249 for (i = 0; i < max_parm_reg; ++i)
3250 if (parm_reg_stack_loc[i])
3251 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3253 /* Now instantiate the remaining register equivalences for debugging info.
3254 These will not be valid addresses. */
3255 instantiate_decls (fndecl, 0);
3257 /* Indicate that, from now on, assign_stack_local should use
3258 frame_pointer_rtx. */
3259 virtuals_instantiated = 1;
3262 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3263 all virtual registers in their DECL_RTL's.
3265 If VALID_ONLY, do this only if the resulting address is still valid.
3266 Otherwise, always do it. */
3268 static void
3269 instantiate_decls (fndecl, valid_only)
3270 tree fndecl;
3271 int valid_only;
3273 tree decl;
3275 if (DECL_SAVED_INSNS (fndecl))
3276 /* When compiling an inline function, the obstack used for
3277 rtl allocation is the maybepermanent_obstack. Calling
3278 `resume_temporary_allocation' switches us back to that
3279 obstack while we process this function's parameters. */
3280 resume_temporary_allocation ();
3282 /* Process all parameters of the function. */
3283 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3285 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3287 instantiate_decl (DECL_RTL (decl), size, valid_only);
3289 /* If the parameter was promoted, then the incoming RTL mode may be
3290 larger than the declared type size. We must use the larger of
3291 the two sizes. */
3292 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3293 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3296 /* Now process all variables defined in the function or its subblocks. */
3297 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3299 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3301 /* Save all rtl allocated for this function by raising the
3302 high-water mark on the maybepermanent_obstack. */
3303 preserve_data ();
3304 /* All further rtl allocation is now done in the current_obstack. */
3305 rtl_in_current_obstack ();
3309 /* Subroutine of instantiate_decls: Process all decls in the given
3310 BLOCK node and all its subblocks. */
3312 static void
3313 instantiate_decls_1 (let, valid_only)
3314 tree let;
3315 int valid_only;
3317 tree t;
3319 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3320 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3321 valid_only);
3323 /* Process all subblocks. */
3324 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3325 instantiate_decls_1 (t, valid_only);
3328 /* Subroutine of the preceding procedures: Given RTL representing a
3329 decl and the size of the object, do any instantiation required.
3331 If VALID_ONLY is non-zero, it means that the RTL should only be
3332 changed if the new address is valid. */
3334 static void
3335 instantiate_decl (x, size, valid_only)
3336 rtx x;
3337 int size;
3338 int valid_only;
3340 enum machine_mode mode;
3341 rtx addr;
3343 /* If this is not a MEM, no need to do anything. Similarly if the
3344 address is a constant or a register that is not a virtual register. */
3346 if (x == 0 || GET_CODE (x) != MEM)
3347 return;
3349 addr = XEXP (x, 0);
3350 if (CONSTANT_P (addr)
3351 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3352 || (GET_CODE (addr) == REG
3353 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3354 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3355 return;
3357 /* If we should only do this if the address is valid, copy the address.
3358 We need to do this so we can undo any changes that might make the
3359 address invalid. This copy is unfortunate, but probably can't be
3360 avoided. */
3362 if (valid_only)
3363 addr = copy_rtx (addr);
3365 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3367 if (valid_only)
3369 /* Now verify that the resulting address is valid for every integer or
3370 floating-point mode up to and including SIZE bytes long. We do this
3371 since the object might be accessed in any mode and frame addresses
3372 are shared. */
3374 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3375 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3376 mode = GET_MODE_WIDER_MODE (mode))
3377 if (! memory_address_p (mode, addr))
3378 return;
3380 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3381 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3382 mode = GET_MODE_WIDER_MODE (mode))
3383 if (! memory_address_p (mode, addr))
3384 return;
3387 /* Put back the address now that we have updated it and we either know
3388 it is valid or we don't care whether it is valid. */
3390 XEXP (x, 0) = addr;
3393 /* Given a pointer to a piece of rtx and an optional pointer to the
3394 containing object, instantiate any virtual registers present in it.
3396 If EXTRA_INSNS, we always do the replacement and generate
3397 any extra insns before OBJECT. If it is zero, we do nothing if the
3398 replacement is not valid.
3400 Return 1 if we either had nothing to do or if we were able to do the
3401 needed replacement. Return 0 otherwise; we only return zero if
3402 EXTRA_INSNS is zero.
3404 We first try some simple transformations to avoid the creation of extra
3405 pseudos. */
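/* For example (a sketch), the non-local goto code may assign to a
   virtual register:

       (set (reg:SI virtual-stack-vars) (reg:SI 99))

   The SET case below rewrites the destination as the hard frame pointer
   and emits insns before OBJECT that adjust the source by -var_offset.  */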
3407 static int
3408 instantiate_virtual_regs_1 (loc, object, extra_insns)
3409 rtx *loc;
3410 rtx object;
3411 int extra_insns;
3413 rtx x;
3414 RTX_CODE code;
3415 rtx new = 0;
3416 HOST_WIDE_INT offset;
3417 rtx temp;
3418 rtx seq;
3419 int i, j;
3420 char *fmt;
3422 /* Re-start here to avoid recursion in common cases. */
3423 restart:
3425 x = *loc;
3426 if (x == 0)
3427 return 1;
3429 code = GET_CODE (x);
3431 /* Check for some special cases. */
3432 switch (code)
3434 case CONST_INT:
3435 case CONST_DOUBLE:
3436 case CONST:
3437 case SYMBOL_REF:
3438 case CODE_LABEL:
3439 case PC:
3440 case CC0:
3441 case ASM_INPUT:
3442 case ADDR_VEC:
3443 case ADDR_DIFF_VEC:
3444 case RETURN:
3445 return 1;
3447 case SET:
3448 /* We are allowed to set the virtual registers. This means that
3449 the actual register should receive the source minus the
3450 appropriate offset. This is used, for example, in the handling
3451 of non-local gotos. */
3452 if (SET_DEST (x) == virtual_incoming_args_rtx)
3453 new = arg_pointer_rtx, offset = - in_arg_offset;
3454 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3455 new = frame_pointer_rtx, offset = - var_offset;
3456 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3457 new = stack_pointer_rtx, offset = - dynamic_offset;
3458 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3459 new = stack_pointer_rtx, offset = - out_arg_offset;
3460 else if (SET_DEST (x) == virtual_cfa_rtx)
3461 new = arg_pointer_rtx, offset = - cfa_offset;
3463 if (new)
3465 /* The only valid sources here are PLUS or REG. Just do
3466 the simplest possible thing to handle them. */
3467 if (GET_CODE (SET_SRC (x)) != REG
3468 && GET_CODE (SET_SRC (x)) != PLUS)
3469 abort ();
3471 start_sequence ();
3472 if (GET_CODE (SET_SRC (x)) != REG)
3473 temp = force_operand (SET_SRC (x), NULL_RTX);
3474 else
3475 temp = SET_SRC (x);
3476 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3477 seq = get_insns ();
3478 end_sequence ();
3480 emit_insns_before (seq, object);
3481 SET_DEST (x) = new;
3483 if (! validate_change (object, &SET_SRC (x), temp, 0)
3484 || ! extra_insns)
3485 abort ();
3487 return 1;
3490 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3491 loc = &SET_SRC (x);
3492 goto restart;
3494 case PLUS:
3495 /* Handle special case of virtual register plus constant. */
3496 if (CONSTANT_P (XEXP (x, 1)))
3498 rtx old, new_offset;
3500 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3501 if (GET_CODE (XEXP (x, 0)) == PLUS)
3503 rtx inner = XEXP (XEXP (x, 0), 0);
3505 if (inner == virtual_incoming_args_rtx)
3506 new = arg_pointer_rtx, offset = in_arg_offset;
3507 else if (inner == virtual_stack_vars_rtx)
3508 new = frame_pointer_rtx, offset = var_offset;
3509 else if (inner == virtual_stack_dynamic_rtx)
3510 new = stack_pointer_rtx, offset = dynamic_offset;
3511 else if (inner == virtual_outgoing_args_rtx)
3512 new = stack_pointer_rtx, offset = out_arg_offset;
3513 else if (inner == virtual_cfa_rtx)
3514 new = arg_pointer_rtx, offset = cfa_offset;
3515 else
3517 loc = &XEXP (x, 0);
3518 goto restart;
3521 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3522 extra_insns);
3523 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3526 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3527 new = arg_pointer_rtx, offset = in_arg_offset;
3528 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3529 new = frame_pointer_rtx, offset = var_offset;
3530 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3531 new = stack_pointer_rtx, offset = dynamic_offset;
3532 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3533 new = stack_pointer_rtx, offset = out_arg_offset;
3534 else if (XEXP (x, 0) == virtual_cfa_rtx)
3535 new = arg_pointer_rtx, offset = cfa_offset;
3536 else
3538 /* We know the second operand is a constant. Unless the
3539 first operand is a REG (which has already been checked),
3540 it needs to be checked. */
3541 if (GET_CODE (XEXP (x, 0)) != REG)
3543 loc = &XEXP (x, 0);
3544 goto restart;
3546 return 1;
3549 new_offset = plus_constant (XEXP (x, 1), offset);
3551 /* If the new constant is zero, try to replace the sum with just
3552 the register. */
3553 if (new_offset == const0_rtx
3554 && validate_change (object, loc, new, 0))
3555 return 1;
3557 /* Next try to replace the register and new offset.
3558 There are two changes to validate here, and we can't assume that
3559 when the old offset equals the new one, just changing the register
3560 will yield a valid insn. In the interests of a little efficiency,
3561 however, we only call validate_change once (we don't queue up the
3562 changes and then call apply_change_group). */
3564 old = XEXP (x, 0);
3565 if (offset == 0
3566 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3567 : (XEXP (x, 0) = new,
3568 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3570 if (! extra_insns)
3572 XEXP (x, 0) = old;
3573 return 0;
3576 /* Otherwise copy the new constant into a register and replace
3577 the constant with that register. */
3578 temp = gen_reg_rtx (Pmode);
3579 XEXP (x, 0) = new;
3580 if (validate_change (object, &XEXP (x, 1), temp, 0))
3581 emit_insn_before (gen_move_insn (temp, new_offset), object);
3582 else
3584 /* If that didn't work, replace this expression with a
3585 register containing the sum. */
3587 XEXP (x, 0) = old;
3588 new = gen_rtx_PLUS (Pmode, new, new_offset);
3590 start_sequence ();
3591 temp = force_operand (new, NULL_RTX);
3592 seq = get_insns ();
3593 end_sequence ();
3595 emit_insns_before (seq, object);
3596 if (! validate_change (object, loc, temp, 0)
3597 && ! validate_replace_rtx (x, temp, object))
3598 abort ();
3602 return 1;
3605 /* Fall through to generic two-operand expression case. */
3606 case EXPR_LIST:
3607 case CALL:
3608 case COMPARE:
3609 case MINUS:
3610 case MULT:
3611 case DIV: case UDIV:
3612 case MOD: case UMOD:
3613 case AND: case IOR: case XOR:
3614 case ROTATERT: case ROTATE:
3615 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3616 case NE: case EQ:
3617 case GE: case GT: case GEU: case GTU:
3618 case LE: case LT: case LEU: case LTU:
3619 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3620 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3621 loc = &XEXP (x, 0);
3622 goto restart;
3624 case MEM:
3625 /* Most cases of MEM that convert to valid addresses have already been
3626 handled by our scan of decls. The only special handling we
3627 need here is to make a copy of the rtx to ensure it isn't being
3628 shared if we have to change it to a pseudo.
3630 If the rtx is a simple reference to an address via a virtual register,
3631 it can potentially be shared. In such cases, first try to make it
3632 a valid address, which can also be shared. Otherwise, copy it and
3633 proceed normally.
3635 First check for common cases that need no processing. These are
3636 usually due to instantiation already being done on a previous instance
3637 of a shared rtx. */
3639 temp = XEXP (x, 0);
3640 if (CONSTANT_ADDRESS_P (temp)
3641 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3642 || temp == arg_pointer_rtx
3643 #endif
3644 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3645 || temp == hard_frame_pointer_rtx
3646 #endif
3647 || temp == frame_pointer_rtx)
3648 return 1;
3650 if (GET_CODE (temp) == PLUS
3651 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3652 && (XEXP (temp, 0) == frame_pointer_rtx
3653 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3654 || XEXP (temp, 0) == hard_frame_pointer_rtx
3655 #endif
3656 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3657 || XEXP (temp, 0) == arg_pointer_rtx
3658 #endif
3660 return 1;
3662 if (temp == virtual_stack_vars_rtx
3663 || temp == virtual_incoming_args_rtx
3664 || (GET_CODE (temp) == PLUS
3665 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3666 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3667 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3669 /* This MEM may be shared. If the substitution can be done without
3670 the need to generate new pseudos, we want to do it in place
3671 so all copies of the shared rtx benefit. The call below will
3672 only make substitutions if the resulting address is still
3673 valid.
3675 Note that we cannot pass X as the object in the recursive call
3676 since the insn being processed may not allow all valid
3677 addresses. However, if we were not passed an object, we can
3678 only modify X without copying it if X will have a valid
3679 address.
3681 ??? Also note that this can still lose if OBJECT is an insn that
3682 has fewer restrictions on an address than some other insn.
3683 In that case, we will modify the shared address. This case
3684 doesn't seem very likely, though. One case where this could
3685 happen is in the case of a USE or CLOBBER reference, but we
3686 take care of that below. */
3688 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3689 object ? object : x, 0))
3690 return 1;
3692 /* Otherwise make a copy and process that copy. We copy the entire
3693 RTL expression since it might be a PLUS which could also be
3694 shared. */
3695 *loc = x = copy_rtx (x);
3698 /* Fall through to generic unary operation case. */
3699 case SUBREG:
3700 case STRICT_LOW_PART:
3701 case NEG: case NOT:
3702 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3703 case SIGN_EXTEND: case ZERO_EXTEND:
3704 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3705 case FLOAT: case FIX:
3706 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3707 case ABS:
3708 case SQRT:
3709 case FFS:
3710 /* These cases either have just one operand, or we know that we need not
3711 check the rest of the operands. */
3712 loc = &XEXP (x, 0);
3713 goto restart;
3715 case USE:
3716 case CLOBBER:
3717 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3718 go ahead and make the invalid one, but do it to a copy. For a REG,
3719 just make the recursive call, since there's no chance of a problem. */
3721 if ((GET_CODE (XEXP (x, 0)) == MEM
3722 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3724 || (GET_CODE (XEXP (x, 0)) == REG
3725 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3726 return 1;
3728 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3729 loc = &XEXP (x, 0);
3730 goto restart;
3732 case REG:
3733 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3734 in front of this insn and substitute the temporary. */
3735 if (x == virtual_incoming_args_rtx)
3736 new = arg_pointer_rtx, offset = in_arg_offset;
3737 else if (x == virtual_stack_vars_rtx)
3738 new = frame_pointer_rtx, offset = var_offset;
3739 else if (x == virtual_stack_dynamic_rtx)
3740 new = stack_pointer_rtx, offset = dynamic_offset;
3741 else if (x == virtual_outgoing_args_rtx)
3742 new = stack_pointer_rtx, offset = out_arg_offset;
3743 else if (x == virtual_cfa_rtx)
3744 new = arg_pointer_rtx, offset = cfa_offset;
3746 if (new)
3748 temp = plus_constant (new, offset);
3749 if (!validate_change (object, loc, temp, 0))
3751 if (! extra_insns)
3752 return 0;
3754 start_sequence ();
3755 temp = force_operand (temp, NULL_RTX);
3756 seq = get_insns ();
3757 end_sequence ();
3759 emit_insns_before (seq, object);
3760 if (! validate_change (object, loc, temp, 0)
3761 && ! validate_replace_rtx (x, temp, object))
3762 abort ();
3766 return 1;
3768 case ADDRESSOF:
3769 if (GET_CODE (XEXP (x, 0)) == REG)
3770 return 1;
3772 else if (GET_CODE (XEXP (x, 0)) == MEM)
3774 /* If we have a (addressof (mem ..)), do any instantiation inside
3775 since we know we'll be making the inside valid when we finally
3776 remove the ADDRESSOF. */
3777 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3778 return 1;
3780 break;
3782 default:
3783 break;
3786 /* Scan all subexpressions. */
3787 fmt = GET_RTX_FORMAT (code);
3788 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3789 if (*fmt == 'e')
3791 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3792 return 0;
3794 else if (*fmt == 'E')
3795 for (j = 0; j < XVECLEN (x, i); j++)
3796 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3797 extra_insns))
3798 return 0;
3800 return 1;
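/* Illustrative sketch, not part of the original code: suppose
   var_offset is 16. A reference such as

	(mem:SI (plus:SI (reg virtual-stack-vars) (const_int 4)))

   is rewritten by instantiate_virtual_regs_1 into

	(mem:SI (plus:SI (reg frame-pointer) (const_int 20)))

   -- the virtual register is replaced by its hard counterpart and the
   two constants are folded via plus_constant. If the folded address is
   not valid for the containing insn and EXTRA_INSNS is nonzero, the
   sum is instead computed into a fresh pseudo ahead of the insn. */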
3803 /* Optimization: assuming this function does not receive nonlocal gotos,
3804 delete the handlers for such, as well as the insns to establish
3805 and disestablish them. */
3807 static void
3808 delete_handlers ()
3810 rtx insn;
3811 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3813 /* Delete the handler by turning off the flag that would
3814 prevent jump_optimize from deleting it.
3815 Also permit deletion of the nonlocal labels themselves
3816 if nothing local refers to them. */
3817 if (GET_CODE (insn) == CODE_LABEL)
3819 tree t, last_t;
3821 LABEL_PRESERVE_P (insn) = 0;
3823 /* Remove it from the nonlocal_label list, to avoid confusing
3824 flow. */
3825 for (t = nonlocal_labels, last_t = 0; t;
3826 last_t = t, t = TREE_CHAIN (t))
3827 if (DECL_RTL (TREE_VALUE (t)) == insn)
3828 break;
3829 if (t)
3831 if (! last_t)
3832 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3833 else
3834 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3837 if (GET_CODE (insn) == INSN)
3839 int can_delete = 0;
3840 rtx t;
3841 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3842 if (reg_mentioned_p (t, PATTERN (insn)))
3844 can_delete = 1;
3845 break;
3847 if (can_delete
3848 || (nonlocal_goto_stack_level != 0
3849 && reg_mentioned_p (nonlocal_goto_stack_level,
3850 PATTERN (insn))))
3851 delete_insn (insn);
3856 /* Output a USE for any register use in RTL.
3857 This is used with -noreg to mark the extent of the lifespan
3858 of any registers used in a user-visible variable's DECL_RTL. */
3860 void
3861 use_variable (rtl)
3862 rtx rtl;
3864 if (GET_CODE (rtl) == REG)
3865 /* This is a register variable. */
3866 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3867 else if (GET_CODE (rtl) == MEM
3868 && GET_CODE (XEXP (rtl, 0)) == REG
3869 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3870 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3871 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3872 /* This is a variable-sized structure. */
3873 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3876 /* Like use_variable except that it outputs the USEs after INSN
3877 instead of at the end of the insn-chain. */
3879 void
3880 use_variable_after (rtl, insn)
3881 rtx rtl, insn;
3883 if (GET_CODE (rtl) == REG)
3884 /* This is a register variable. */
3885 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3886 else if (GET_CODE (rtl) == MEM
3887 && GET_CODE (XEXP (rtl, 0)) == REG
3888 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3889 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3890 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3891 /* This is a variable-sized structure. */
3892 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
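/* By way of example (register number invented): for a register
   variable living in (reg:SI 42), both routines emit
   (use (reg:SI 42)), so the register appears live to the optimizers
   over the variable's whole extent. */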
3896 max_parm_reg_num ()
3898 return max_parm_reg;
3901 /* Return the first insn following those generated by `assign_parms'. */
3904 get_first_nonparm_insn ()
3906 if (last_parm_insn)
3907 return NEXT_INSN (last_parm_insn);
3908 return get_insns ();
3911 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3912 Crash if there is none. */
3915 get_first_block_beg ()
3917 register rtx searcher;
3918 register rtx insn = get_first_nonparm_insn ();
3920 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3921 if (GET_CODE (searcher) == NOTE
3922 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3923 return searcher;
3925 abort (); /* Invalid call to this function. (See comments above.) */
3926 return NULL_RTX;
3929 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3930 This means a type for which function calls must pass an address to the
3931 function or get an address back from the function.
3932 EXP may be a type node or an expression (whose type is tested). */
3935 aggregate_value_p (exp)
3936 tree exp;
3938 int i, regno, nregs;
3939 rtx reg;
3940 tree type;
3941 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3942 type = exp;
3943 else
3944 type = TREE_TYPE (exp);
3946 if (RETURN_IN_MEMORY (type))
3947 return 1;
3948 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3949 and thus can't be returned in registers. */
3950 if (TREE_ADDRESSABLE (type))
3951 return 1;
3952 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3953 return 1;
3954 /* Make sure we have suitable call-clobbered regs to return
3955 the value in; if not, we must return it in memory. */
3956 reg = hard_function_value (type, 0);
3958 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3959 it is OK. */
3960 if (GET_CODE (reg) != REG)
3961 return 0;
3963 regno = REGNO (reg);
3964 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3965 for (i = 0; i < nregs; i++)
3966 if (! call_used_regs[regno + i])
3967 return 1;
3968 return 0;
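/* A worked example under assumed target values: if hard_function_value
   returns (reg:TI 0) and HARD_REGNO_NREGS says TImode occupies regs
   0..3, the loop above returns 1 (return in memory) as soon as any of
   regs 0..3 is not call-clobbered, since a call-saved register cannot
   carry a return value. */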
3971 /* Assign RTL expressions to the function's parameters.
3972 This may involve copying them into registers and using
3973 those registers as the RTL for them.
3975 If SECOND_TIME is non-zero it means that this function is being
3976 called a second time. This is done by integrate.c when a function's
3977 compilation is deferred. We need to come back here in case the
3978 FUNCTION_ARG macro computes items needed for the rest of the compilation
3979 (such as changing which registers are fixed or caller-saved). But suppress
3980 writing any insns or setting DECL_RTL of anything in this case. */
3982 void
3983 assign_parms (fndecl, second_time)
3984 tree fndecl;
3985 int second_time;
3987 register tree parm;
3988 register rtx entry_parm = 0;
3989 register rtx stack_parm = 0;
3990 CUMULATIVE_ARGS args_so_far;
3991 enum machine_mode promoted_mode, passed_mode;
3992 enum machine_mode nominal_mode, promoted_nominal_mode;
3993 int unsignedp;
3994 /* Total space needed so far for args on the stack,
3995 given as a constant and a tree-expression. */
3996 struct args_size stack_args_size;
3997 tree fntype = TREE_TYPE (fndecl);
3998 tree fnargs = DECL_ARGUMENTS (fndecl);
3999 /* This is used for the arg pointer when referring to stack args. */
4000 rtx internal_arg_pointer;
4001 /* This is a dummy PARM_DECL that we used for the function result if
4002 the function returns a structure. */
4003 tree function_result_decl = 0;
4004 int varargs_setup = 0;
4005 rtx conversion_insns = 0;
4007 /* Nonzero if the last arg is named `__builtin_va_alist',
4008 which is used on some machines for old-fashioned non-ANSI varargs.h;
4009 this should be stuck onto the stack as if it had arrived there. */
4010 int hide_last_arg
4011 = (current_function_varargs
4012 && fnargs
4013 && (parm = tree_last (fnargs)) != 0
4014 && DECL_NAME (parm)
4015 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4016 "__builtin_va_alist")));
4018 /* Nonzero if function takes extra anonymous args.
4019 This means the last named arg must be on the stack
4020 right before the anonymous ones. */
4021 int stdarg
4022 = (TYPE_ARG_TYPES (fntype) != 0
4023 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4024 != void_type_node));
4026 current_function_stdarg = stdarg;
4028 /* If the reg that the virtual arg pointer will be translated into is
4029 not a fixed reg or is the stack pointer, make a copy of the virtual
4030 arg pointer, and address parms via the copy. The frame pointer is
4031 considered fixed even though it is not marked as such.
4033 The second time through, simply use ap to avoid generating rtx. */
4035 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4036 || ! (fixed_regs[ARG_POINTER_REGNUM]
4037 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
4038 && ! second_time)
4039 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4040 else
4041 internal_arg_pointer = virtual_incoming_args_rtx;
4042 current_function_internal_arg_pointer = internal_arg_pointer;
4044 stack_args_size.constant = 0;
4045 stack_args_size.var = 0;
4047 /* If struct value address is treated as the first argument, make it so. */
4048 if (aggregate_value_p (DECL_RESULT (fndecl))
4049 && ! current_function_returns_pcc_struct
4050 && struct_value_incoming_rtx == 0)
4052 tree type = build_pointer_type (TREE_TYPE (fntype));
4054 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4056 DECL_ARG_TYPE (function_result_decl) = type;
4057 TREE_CHAIN (function_result_decl) = fnargs;
4058 fnargs = function_result_decl;
4061 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4062 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4063 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
4065 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4066 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4067 #else
4068 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4069 #endif
4071 /* We haven't yet found an argument that we must push and pretend the
4072 caller did. */
4073 current_function_pretend_args_size = 0;
4075 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4077 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4078 struct args_size stack_offset;
4079 struct args_size arg_size;
4080 int passed_pointer = 0;
4081 int did_conversion = 0;
4082 tree passed_type = DECL_ARG_TYPE (parm);
4083 tree nominal_type = TREE_TYPE (parm);
4084 int pretend_named;
4086 /* Set LAST_NAMED if this is the last named arg before some
4087 anonymous args. */
4088 int last_named = ((TREE_CHAIN (parm) == 0
4089 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4090 && (stdarg || current_function_varargs));
4091 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4092 most machines, if this is a varargs/stdarg function, then we treat
4093 the last named arg as if it were anonymous too. */
4094 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4096 if (TREE_TYPE (parm) == error_mark_node
4097 /* This can happen after weird syntax errors
4098 or if an enum type is defined among the parms. */
4099 || TREE_CODE (parm) != PARM_DECL
4100 || passed_type == NULL)
4102 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4103 = gen_rtx_MEM (BLKmode, const0_rtx);
4104 TREE_USED (parm) = 1;
4105 continue;
4108 /* For a varargs.h function, save info about regs and stack space
4109 used by the individual args, not including the va_alist arg. */
4110 if (hide_last_arg && last_named)
4111 current_function_args_info = args_so_far;
4113 /* Find mode of arg as it is passed, and mode of arg
4114 as it should be during execution of this function. */
4115 passed_mode = TYPE_MODE (passed_type);
4116 nominal_mode = TYPE_MODE (nominal_type);
4118 /* If the parm's mode is VOID, its value doesn't matter;
4119 avoid the usual things like emit_move_insn that could crash. */
4120 if (nominal_mode == VOIDmode)
4122 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4123 continue;
4126 /* If the parm is to be passed as a transparent union, use the
4127 type of the first field for the tests below. We have already
4128 verified that the modes are the same. */
4129 if (DECL_TRANSPARENT_UNION (parm)
4130 || TYPE_TRANSPARENT_UNION (passed_type))
4131 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4133 /* See if this arg was passed by invisible reference. It is if
4134 it is an object whose size depends on the contents of the
4135 object itself or if the machine requires these objects be passed
4136 that way. */
4138 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4139 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4140 || TREE_ADDRESSABLE (passed_type)
4141 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4142 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4143 passed_type, named_arg)
4144 #endif
4147 passed_type = nominal_type = build_pointer_type (passed_type);
4148 passed_pointer = 1;
4149 passed_mode = nominal_mode = Pmode;
4152 promoted_mode = passed_mode;
4154 #ifdef PROMOTE_FUNCTION_ARGS
4155 /* Compute the mode to which the arg is actually extended. */
4156 unsignedp = TREE_UNSIGNED (passed_type);
4157 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4158 #endif
4160 /* Let machine desc say which reg (if any) the parm arrives in.
4161 0 means it arrives on the stack. */
4162 #ifdef FUNCTION_INCOMING_ARG
4163 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4164 passed_type, named_arg);
4165 #else
4166 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4167 passed_type, named_arg);
4168 #endif
4170 if (entry_parm == 0)
4171 promoted_mode = passed_mode;
4173 #ifdef SETUP_INCOMING_VARARGS
4174 /* If this is the last named parameter, do any required setup for
4175 varargs or stdargs. We need to know about the case of this being an
4176 addressable type, in which case we skip the registers it
4177 would have arrived in.
4179 For stdargs, LAST_NAMED will be set for two parameters, the one that
4180 is actually the last named, and the dummy parameter. We only
4181 want to do this action once.
4183 Also, indicate when RTL generation is to be suppressed. */
4184 if (last_named && !varargs_setup)
4186 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4187 current_function_pretend_args_size,
4188 second_time);
4189 varargs_setup = 1;
4191 #endif
4193 /* Determine parm's home in the stack,
4194 in case it arrives in the stack or we should pretend it did.
4196 Compute the stack position and rtx where the argument arrives
4197 and its size.
4199 There is one complexity here: If this was a parameter that would
4200 have been passed in registers, but wasn't only because it is
4201 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4202 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4203 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4204 0 as it was the previous time. */
4206 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4207 locate_and_pad_parm (nominal_mode, passed_type,
4208 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4210 #else
4211 #ifdef FUNCTION_INCOMING_ARG
4212 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4213 passed_type,
4214 pretend_named) != 0,
4215 #else
4216 FUNCTION_ARG (args_so_far, promoted_mode,
4217 passed_type,
4218 pretend_named) != 0,
4219 #endif
4220 #endif
4221 fndecl, &stack_args_size, &stack_offset, &arg_size);
4223 if (! second_time)
4225 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4227 if (offset_rtx == const0_rtx)
4228 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4229 else
4230 stack_parm = gen_rtx_MEM (nominal_mode,
4231 gen_rtx_PLUS (Pmode,
4232 internal_arg_pointer,
4233 offset_rtx));
4235 /* If this is a memory ref that contains aggregate components,
4236 mark it as such for cse and loop optimize. Likewise if it
4237 is readonly. */
4238 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4239 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4240 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4243 /* If this parameter was passed both in registers and in the stack,
4244 use the copy on the stack. */
4245 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4246 entry_parm = 0;
4248 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4249 /* If this parm was passed part in regs and part in memory,
4250 pretend it arrived entirely in memory
4251 by pushing the register-part onto the stack.
4253 In the special case of a DImode or DFmode that is split,
4254 we could put it together in a pseudoreg directly,
4255 but for now that's not worth bothering with. */
4257 if (entry_parm)
4259 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4260 passed_type, named_arg);
4262 if (nregs > 0)
4264 current_function_pretend_args_size
4265 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4266 / (PARM_BOUNDARY / BITS_PER_UNIT)
4267 * (PARM_BOUNDARY / BITS_PER_UNIT));
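/* Worked example with assumed values: nregs == 3, UNITS_PER_WORD == 4
   and PARM_BOUNDARY == 32 give ((12 + 4 - 1) / 4) * 4 == 12 bytes of
   pretend args; with PARM_BOUNDARY == 64 the same 12 bytes round up
   to ((12 + 8 - 1) / 8) * 8 == 16. */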
4269 if (! second_time)
4271 /* Handle calls that pass values in multiple non-contiguous
4272 locations. The Irix 6 ABI has examples of this. */
4273 if (GET_CODE (entry_parm) == PARALLEL)
4274 emit_group_store (validize_mem (stack_parm), entry_parm,
4275 int_size_in_bytes (TREE_TYPE (parm)),
4276 (TYPE_ALIGN (TREE_TYPE (parm))
4277 / BITS_PER_UNIT));
4278 else
4279 move_block_from_reg (REGNO (entry_parm),
4280 validize_mem (stack_parm), nregs,
4281 int_size_in_bytes (TREE_TYPE (parm)));
4283 entry_parm = stack_parm;
4286 #endif
4288 /* If we didn't decide this parm came in a register,
4289 by default it came on the stack. */
4290 if (entry_parm == 0)
4291 entry_parm = stack_parm;
4293 /* Record permanently how this parm was passed. */
4294 if (! second_time)
4295 DECL_INCOMING_RTL (parm) = entry_parm;
4297 /* If there is actually space on the stack for this parm,
4298 count it in stack_args_size; otherwise set stack_parm to 0
4299 to indicate there is no preallocated stack slot for the parm. */
4301 if (entry_parm == stack_parm
4302 || (GET_CODE (entry_parm) == PARALLEL
4303 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4304 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4305 /* On some machines, even if a parm value arrives in a register
4306 there is still an (uninitialized) stack slot allocated for it.
4308 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4309 whether this parameter already has a stack slot allocated,
4310 because an arg block exists only if current_function_args_size
4311 is larger than some threshold, and we haven't calculated that
4312 yet. So, for now, we just assume that stack slots never exist
4313 in this case. */
4314 || REG_PARM_STACK_SPACE (fndecl) > 0
4315 #endif
4318 stack_args_size.constant += arg_size.constant;
4319 if (arg_size.var)
4320 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4322 else
4323 /* No stack slot was pushed for this parm. */
4324 stack_parm = 0;
4326 /* Update info on where next arg arrives in registers. */
4328 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4329 passed_type, named_arg);
4331 /* If this is our second time through, we are done with this parm. */
4332 if (second_time)
4333 continue;
4335 /* If we can't trust the parm stack slot to be aligned enough
4336 for its ultimate type, don't use that slot after entry.
4337 We'll make another stack slot, if we need one. */
4339 int thisparm_boundary
4340 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4342 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4343 stack_parm = 0;
4346 /* If parm was passed in memory, and we need to convert it on entry,
4347 don't store it back in that same slot. */
4348 if (entry_parm != 0
4349 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4350 stack_parm = 0;
4352 #if 0
4353 /* Now adjust STACK_PARM to the mode and precise location
4354 where this parameter should live during execution,
4355 if we discover that it must live in the stack during execution.
4356 To make debuggers happier on big-endian machines, we store
4357 the value in the last bytes of the space available. */
4359 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4360 && stack_parm != 0)
4362 rtx offset_rtx;
4364 if (BYTES_BIG_ENDIAN
4365 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4366 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4367 - GET_MODE_SIZE (nominal_mode));
4369 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4370 if (offset_rtx == const0_rtx)
4371 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4372 else
4373 stack_parm = gen_rtx_MEM (nominal_mode,
4374 gen_rtx_PLUS (Pmode,
4375 internal_arg_pointer,
4376 offset_rtx));
4378 /* If this is a memory ref that contains aggregate components,
4379 mark it as such for cse and loop optimize. */
4380 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4382 #endif /* 0 */
4384 #ifdef STACK_REGS
4385 /* We need this "use" info, because the gcc-register->stack-register
4386 converter in reg-stack.c needs to know which registers are active
4387 at the start of the function call. The actual parameter loading
4388 instructions are not always available by then, since they might
4389 have been optimised away. */
4391 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4392 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4393 #endif
4395 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4396 in the mode in which it arrives.
4397 STACK_PARM is an RTX for a stack slot where the parameter can live
4398 during the function (in case we want to put it there).
4399 STACK_PARM is 0 if no stack slot was pushed for it.
4401 Now output code if necessary to convert ENTRY_PARM to
4402 the type in which this function declares it,
4403 and store that result in an appropriate place,
4404 which may be a pseudo reg, may be STACK_PARM,
4405 or may be a local stack slot if STACK_PARM is 0.
4407 Set DECL_RTL to that place. */
4409 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4411 /* If a BLKmode arrives in registers, copy it to a stack slot.
4412 Handle calls that pass values in multiple non-contiguous
4413 locations. The Irix 6 ABI has examples of this. */
4414 if (GET_CODE (entry_parm) == REG
4415 || GET_CODE (entry_parm) == PARALLEL)
4417 int size_stored
4418 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4419 UNITS_PER_WORD);
4421 /* Note that we will be storing an integral number of words.
4422 So we have to be careful to ensure that we allocate an
4423 integral number of words. We do this below in the
4424 assign_stack_local if space was not allocated in the argument
4425 list. If it was, this will not work if PARM_BOUNDARY is not
4426 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4427 if it becomes a problem. */
4429 if (stack_parm == 0)
4431 stack_parm
4432 = assign_stack_local (GET_MODE (entry_parm),
4433 size_stored, 0);
4435 /* If this is a memory ref that contains aggregate
4436 components, mark it as such for cse and loop optimize. */
4437 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4440 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4441 abort ();
4443 if (TREE_READONLY (parm))
4444 RTX_UNCHANGING_P (stack_parm) = 1;
4446 /* Handle calls that pass values in multiple non-contiguous
4447 locations. The Irix 6 ABI has examples of this. */
4448 if (GET_CODE (entry_parm) == PARALLEL)
4449 emit_group_store (validize_mem (stack_parm), entry_parm,
4450 int_size_in_bytes (TREE_TYPE (parm)),
4451 (TYPE_ALIGN (TREE_TYPE (parm))
4452 / BITS_PER_UNIT));
4453 else
4454 move_block_from_reg (REGNO (entry_parm),
4455 validize_mem (stack_parm),
4456 size_stored / UNITS_PER_WORD,
4457 int_size_in_bytes (TREE_TYPE (parm)));
4459 DECL_RTL (parm) = stack_parm;
4461 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4462 && ! DECL_INLINE (fndecl))
4463 /* layout_decl may set this. */
4464 || TREE_ADDRESSABLE (parm)
4465 || TREE_SIDE_EFFECTS (parm)
4466 /* If -ffloat-store specified, don't put explicit
4467 float variables into registers. */
4468 || (flag_float_store
4469 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4470 /* Always assign pseudo to structure return or item passed
4471 by invisible reference. */
4472 || passed_pointer || parm == function_result_decl)
4474 /* Store the parm in a pseudoregister during the function, but we
4475 may need to do it in a wider mode. */
4477 register rtx parmreg;
4478 int regno, regnoi = 0, regnor = 0;
4480 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4482 promoted_nominal_mode
4483 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4485 parmreg = gen_reg_rtx (promoted_nominal_mode);
4486 mark_user_reg (parmreg);
4488 /* If this was an item that we received a pointer to, set DECL_RTL
4489 appropriately. */
4490 if (passed_pointer)
4492 DECL_RTL (parm)
4493 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4494 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4496 else
4497 DECL_RTL (parm) = parmreg;
4499 /* Copy the value into the register. */
4500 if (nominal_mode != passed_mode
4501 || promoted_nominal_mode != promoted_mode)
4503 int save_tree_used;
4504 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4505 mode, by the caller. We now have to convert it to
4506 NOMINAL_MODE, if different. However, PARMREG may be in
4507 a different mode than NOMINAL_MODE if it is being stored
4508 promoted.
4510 If ENTRY_PARM is a hard register, it might be in a register
4511 not valid for operating in its mode (e.g., an odd-numbered
4512 register for a DFmode). In that case, moves are the only
4513 thing valid, so we can't do a convert from there. This
4514 occurs when the calling sequence allows such misaligned
4515 usages.
4517 In addition, the conversion may involve a call, which could
4518 clobber parameters which haven't been copied to pseudo
4519 registers yet. Therefore, we must first copy the parm to
4520 a pseudo reg here, and save the conversion until after all
4521 parameters have been moved. */
4523 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4525 emit_move_insn (tempreg, validize_mem (entry_parm));
4527 push_to_sequence (conversion_insns);
4528 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4530 /* TREE_USED gets set erroneously during expand_assignment. */
4531 save_tree_used = TREE_USED (parm);
4532 expand_assignment (parm,
4533 make_tree (nominal_type, tempreg), 0, 0);
4534 TREE_USED (parm) = save_tree_used;
4535 conversion_insns = get_insns ();
4536 did_conversion = 1;
4537 end_sequence ();
4539 else
4540 emit_move_insn (parmreg, validize_mem (entry_parm));
4542 /* If we were passed a pointer but the actual value
4543 can safely live in a register, put it in one. */
4544 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4545 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4546 && ! DECL_INLINE (fndecl))
4547 /* layout_decl may set this. */
4548 || TREE_ADDRESSABLE (parm)
4549 || TREE_SIDE_EFFECTS (parm)
4550 /* If -ffloat-store specified, don't put explicit
4551 float variables into registers. */
4552 || (flag_float_store
4553 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4555 /* We can't use nominal_mode, because it will have been set to
4556 Pmode above. We must use the actual mode of the parm. */
4557 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4558 mark_user_reg (parmreg);
4559 emit_move_insn (parmreg, DECL_RTL (parm));
4560 DECL_RTL (parm) = parmreg;
4561 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4562 now the parm. */
4563 stack_parm = 0;
4565 #ifdef FUNCTION_ARG_CALLEE_COPIES
4566 /* If we are passed an arg by reference and it is our responsibility
4567 to make a copy, do it now.
4568 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4569 original argument, so we must recreate them in the call to
4570 FUNCTION_ARG_CALLEE_COPIES. */
4571 /* ??? Later add code to handle the case where the argument isn't
4572 modified, so the copy can be skipped. */
4574 else if (passed_pointer
4575 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4576 TYPE_MODE (DECL_ARG_TYPE (parm)),
4577 DECL_ARG_TYPE (parm),
4578 named_arg)
4579 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4581 rtx copy;
4582 tree type = DECL_ARG_TYPE (parm);
4584 /* This sequence may involve a library call perhaps clobbering
4585 registers that haven't been copied to pseudos yet. */
4587 push_to_sequence (conversion_insns);
4589 if (TYPE_SIZE (type) == 0
4590 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4591 /* This is a variable sized object. */
4592 copy = gen_rtx_MEM (BLKmode,
4593 allocate_dynamic_stack_space
4594 (expr_size (parm), NULL_RTX,
4595 TYPE_ALIGN (type)));
4596 else
4597 copy = assign_stack_temp (TYPE_MODE (type),
4598 int_size_in_bytes (type), 1);
4599 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4600 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4602 store_expr (parm, copy, 0);
4603 emit_move_insn (parmreg, XEXP (copy, 0));
4604 if (current_function_check_memory_usage)
4605 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4606 XEXP (copy, 0), ptr_mode,
4607 GEN_INT (int_size_in_bytes (type)),
4608 TYPE_MODE (sizetype),
4609 GEN_INT (MEMORY_USE_RW),
4610 TYPE_MODE (integer_type_node));
4611 conversion_insns = get_insns ();
4612 did_conversion = 1;
4613 end_sequence ();
4615 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4617 /* In any case, record the parm's desired stack location
4618 in case we later discover it must live in the stack.
4620 If it is a COMPLEX value, store the stack location for both
4621 halves. */
4623 if (GET_CODE (parmreg) == CONCAT)
4624 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4625 else
4626 regno = REGNO (parmreg);
4628 if (regno >= max_parm_reg)
4630 rtx *new;
4631 int old_max_parm_reg = max_parm_reg;
4633 /* It's slow to expand this one register at a time,
4634 but it's also rare and we need max_parm_reg to be
4635 precisely correct. */
4636 max_parm_reg = regno + 1;
4637 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4638 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4639 old_max_parm_reg * sizeof (rtx));
4640 bzero ((char *) (new + old_max_parm_reg),
4641 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4642 parm_reg_stack_loc = new;
4645 if (GET_CODE (parmreg) == CONCAT)
4647 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4649 regnor = REGNO (gen_realpart (submode, parmreg));
4650 regnoi = REGNO (gen_imagpart (submode, parmreg));
4652 if (stack_parm != 0)
4654 parm_reg_stack_loc[regnor]
4655 = gen_realpart (submode, stack_parm);
4656 parm_reg_stack_loc[regnoi]
4657 = gen_imagpart (submode, stack_parm);
4659 else
4661 parm_reg_stack_loc[regnor] = 0;
4662 parm_reg_stack_loc[regnoi] = 0;
4665 else
4666 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4668 /* Mark the register as eliminable if we did no conversion
4669 and it was copied from memory at a fixed offset,
4670 and the arg pointer was not copied to a pseudo-reg.
4671 If the arg pointer is a pseudo reg or the offset formed
4672 an invalid address, such memory-equivalences
4673 as we make here would screw up life analysis for it. */
4674 if (nominal_mode == passed_mode
4675 && ! did_conversion
4676 && stack_parm != 0
4677 && GET_CODE (stack_parm) == MEM
4678 && stack_offset.var == 0
4679 && reg_mentioned_p (virtual_incoming_args_rtx,
4680 XEXP (stack_parm, 0)))
4682 rtx linsn = get_last_insn ();
4683 rtx sinsn, set;
4685 /* Mark complex types separately. */
4686 if (GET_CODE (parmreg) == CONCAT)
4687 /* Scan backwards for the set of the real and
4688 imaginary parts. */
4689 for (sinsn = linsn; sinsn != 0;
4690 sinsn = prev_nonnote_insn (sinsn))
4692 set = single_set (sinsn);
4693 if (set != 0
4694 && SET_DEST (set) == regno_reg_rtx [regnoi])
4695 REG_NOTES (sinsn)
4696 = gen_rtx_EXPR_LIST (REG_EQUIV,
4697 parm_reg_stack_loc[regnoi],
4698 REG_NOTES (sinsn));
4699 else if (set != 0
4700 && SET_DEST (set) == regno_reg_rtx [regnor])
4701 REG_NOTES (sinsn)
4702 = gen_rtx_EXPR_LIST (REG_EQUIV,
4703 parm_reg_stack_loc[regnor],
4704 REG_NOTES (sinsn));
4706 else if ((set = single_set (linsn)) != 0
4707 && SET_DEST (set) == parmreg)
4708 REG_NOTES (linsn)
4709 = gen_rtx_EXPR_LIST (REG_EQUIV,
4710 stack_parm, REG_NOTES (linsn));
4713 /* For pointer data type, suggest pointer register. */
4714 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4715 mark_reg_pointer (parmreg,
4716 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4717 / BITS_PER_UNIT));
4719 else
4721 /* Value must be stored in the stack slot STACK_PARM
4722 during function execution. */
4724 if (promoted_mode != nominal_mode)
4726 /* Conversion is required. */
4727 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4729 emit_move_insn (tempreg, validize_mem (entry_parm));
4731 push_to_sequence (conversion_insns);
4732 entry_parm = convert_to_mode (nominal_mode, tempreg,
4733 TREE_UNSIGNED (TREE_TYPE (parm)));
4734 if (stack_parm)
4736 /* ??? This may need a big-endian conversion on sparc64. */
4737 stack_parm = change_address (stack_parm, nominal_mode,
4738 NULL_RTX);
4740 conversion_insns = get_insns ();
4741 did_conversion = 1;
4742 end_sequence ();
4745 if (entry_parm != stack_parm)
4747 if (stack_parm == 0)
4749 stack_parm
4750 = assign_stack_local (GET_MODE (entry_parm),
4751 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4752 /* If this is a memory ref that contains aggregate components,
4753 mark it as such for cse and loop optimize. */
4754 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4757 if (promoted_mode != nominal_mode)
4759 push_to_sequence (conversion_insns);
4760 emit_move_insn (validize_mem (stack_parm),
4761 validize_mem (entry_parm));
4762 conversion_insns = get_insns ();
4763 end_sequence ();
4765 else
4766 emit_move_insn (validize_mem (stack_parm),
4767 validize_mem (entry_parm));
4769 if (current_function_check_memory_usage)
4771 push_to_sequence (conversion_insns);
4772 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4773 XEXP (stack_parm, 0), ptr_mode,
4774 GEN_INT (GET_MODE_SIZE (GET_MODE
4775 (entry_parm))),
4776 TYPE_MODE (sizetype),
4777 GEN_INT (MEMORY_USE_RW),
4778 TYPE_MODE (integer_type_node));
4780 conversion_insns = get_insns ();
4781 end_sequence ();
4783 DECL_RTL (parm) = stack_parm;
4786 /* If this "parameter" was the place where we are receiving the
4787 function's incoming structure pointer, set up the result. */
4788 if (parm == function_result_decl)
4790 tree result = DECL_RESULT (fndecl);
4791 tree restype = TREE_TYPE (result);
4793 DECL_RTL (result)
4794 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4796 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4797 AGGREGATE_TYPE_P (restype));
4800 if (TREE_THIS_VOLATILE (parm))
4801 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4802 if (TREE_READONLY (parm))
4803 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4806 /* Output all parameter conversion instructions (possibly including calls)
4807 now that all parameters have been copied out of hard registers. */
4808 emit_insns (conversion_insns);
4810 last_parm_insn = get_last_insn ();
4812 current_function_args_size = stack_args_size.constant;
4814 /* Adjust function incoming argument size for alignment and
4815 minimum length. */
4817 #ifdef REG_PARM_STACK_SPACE
4818 #ifndef MAYBE_REG_PARM_STACK_SPACE
4819 current_function_args_size = MAX (current_function_args_size,
4820 REG_PARM_STACK_SPACE (fndecl));
4821 #endif
4822 #endif
4824 #ifdef PREFERRED_STACK_BOUNDARY
4825 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
4827 current_function_args_size
4828 = ((current_function_args_size + STACK_BYTES - 1)
4829 / STACK_BYTES) * STACK_BYTES;
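/* E.g., with an (assumed) PREFERRED_STACK_BOUNDARY of 128, STACK_BYTES
   is 16 and an args size of 20 bytes rounds up to
   ((20 + 16 - 1) / 16) * 16 == 32. */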
4830 #endif
4832 #ifdef ARGS_GROW_DOWNWARD
4833 current_function_arg_offset_rtx
4834 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4835 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4836 size_int (-stack_args_size.constant)),
4837 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4838 #else
4839 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4840 #endif
4842 /* See how many bytes, if any, of its args a function should try to pop
4843 on return. */
4845 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4846 current_function_args_size);
4848 /* For a stdarg.h function, save info about
4849 regs and stack space used by the named args. */
4851 if (!hide_last_arg)
4852 current_function_args_info = args_so_far;
4854 /* Set the rtx used for the function return value. Put this in its
4855 own variable so any optimizers that need this information don't have
4856 to include tree.h. Do this here so it gets done when an inlined
4857 function gets output. */
4859 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4862 /* Indicate whether REGNO is an incoming argument to the current function
4863 that was promoted to a wider mode. If so, return the RTX for the
4864 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4865 that REGNO is promoted from and whether the promotion was signed or
4866 unsigned. */
4868 #ifdef PROMOTE_FUNCTION_ARGS
4871 promoted_input_arg (regno, pmode, punsignedp)
4872 int regno;
4873 enum machine_mode *pmode;
4874 int *punsignedp;
4876 tree arg;
4878 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4879 arg = TREE_CHAIN (arg))
4880 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4881 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4882 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4884 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4885 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4887 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4888 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4889 && mode != DECL_MODE (arg))
4891 *pmode = DECL_MODE (arg);
4892 *punsignedp = unsignedp;
4893 return DECL_INCOMING_RTL (arg);
4897 return 0;
4900 #endif
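/* Usage sketch under assumed promotion rules: on a target that widens
   sub-word integers, a `char' argument arriving in (reg:SI 5) makes
   promoted_input_arg (5, &mode, &unsignedp) return that REG, with
   *pmode set to QImode and *punsignedp reflecting the argument's
   signedness, so a caller knows the upper bits of the register hold a
   valid extension of the QImode value. */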
4902 /* Compute the size and offset from the start of the stacked arguments for a
4903 parm passed in mode PASSED_MODE and with type TYPE.
4905 INITIAL_OFFSET_PTR points to the current offset into the stacked
4906 arguments.
4908 The starting offset and size for this parm are returned in *OFFSET_PTR
4909 and *ARG_SIZE_PTR, respectively.
4911 IN_REGS is non-zero if the argument will be passed in registers. It will
4912 never be set if REG_PARM_STACK_SPACE is not defined.
4914 FNDECL is the function in which the argument was defined.
4916 There are two types of rounding that are done. The first, controlled by
4917 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4918 list to be aligned to the specific boundary (in bits). This rounding
4919 affects the initial and starting offsets, but not the argument size.
4921 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4922 optionally rounds the size of the parm to PARM_BOUNDARY. The
4923 initial offset is not affected by this rounding, while the size always
4924 is and the starting offset may be. */
4926 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4927 initial_offset_ptr is positive because locate_and_pad_parm's
4928 callers pass in the total size of args so far as
4929 initial_offset_ptr. arg_size_ptr is always positive. */
4931 void
4932 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4933 initial_offset_ptr, offset_ptr, arg_size_ptr)
4934 enum machine_mode passed_mode;
4935 tree type;
4936 int in_regs;
4937 tree fndecl;
4938 struct args_size *initial_offset_ptr;
4939 struct args_size *offset_ptr;
4940 struct args_size *arg_size_ptr;
4942 tree sizetree
4943 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4944 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4945 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4947 #ifdef REG_PARM_STACK_SPACE
4948 /* If we have found a stack parm before we reach the end of the
4949 area reserved for registers, skip that area. */
4950 if (! in_regs)
4952 int reg_parm_stack_space = 0;
4954 #ifdef MAYBE_REG_PARM_STACK_SPACE
4955 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4956 #else
4957 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4958 #endif
4959 if (reg_parm_stack_space > 0)
4961 if (initial_offset_ptr->var)
4963 initial_offset_ptr->var
4964 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4965 size_int (reg_parm_stack_space));
4966 initial_offset_ptr->constant = 0;
4968 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4969 initial_offset_ptr->constant = reg_parm_stack_space;
4972 #endif /* REG_PARM_STACK_SPACE */
4974 arg_size_ptr->var = 0;
4975 arg_size_ptr->constant = 0;
4977 #ifdef ARGS_GROW_DOWNWARD
4978 if (initial_offset_ptr->var)
4980 offset_ptr->constant = 0;
4981 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4982 initial_offset_ptr->var);
4984 else
4986 offset_ptr->constant = - initial_offset_ptr->constant;
4987 offset_ptr->var = 0;
4989 if (where_pad != none
4990 && (TREE_CODE (sizetree) != INTEGER_CST
4991 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4992 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4993 SUB_PARM_SIZE (*offset_ptr, sizetree);
4994 if (where_pad != downward)
4995 pad_to_arg_alignment (offset_ptr, boundary);
4996 if (initial_offset_ptr->var)
4998 arg_size_ptr->var = size_binop (MINUS_EXPR,
4999 size_binop (MINUS_EXPR,
5000 integer_zero_node,
5001 initial_offset_ptr->var),
5002 offset_ptr->var);
5004 else
5006 arg_size_ptr->constant = (- initial_offset_ptr->constant
5007 - offset_ptr->constant);
5009 #else /* !ARGS_GROW_DOWNWARD */
5010 pad_to_arg_alignment (initial_offset_ptr, boundary);
5011 *offset_ptr = *initial_offset_ptr;
5013 #ifdef PUSH_ROUNDING
5014 if (passed_mode != BLKmode)
5015 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5016 #endif
5018 /* Pad_below needs the pre-rounded size to know how much to pad below,
5019 so this must be done before rounding up. */
5020 if (where_pad == downward
5021 /* However, BLKmode args passed in regs have their padding done elsewhere.
5022 The stack slot must be able to hold the entire register. */
5023 && !(in_regs && passed_mode == BLKmode))
5024 pad_below (offset_ptr, passed_mode, sizetree);
5026 if (where_pad != none
5027 && (TREE_CODE (sizetree) != INTEGER_CST
5028 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5029 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5031 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5032 #endif /* ARGS_GROW_DOWNWARD */
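/* Worked example, all values assumed: on a machine where args grow
   upward, with FUNCTION_ARG_BOUNDARY == 64 and PARM_BOUNDARY == 32, a
   6-byte parm padded upward at initial offset 20 gets its starting
   offset aligned up to 24 and its size rounded up to 8, so the running
   offset for the next parm becomes 32. */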
5035 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5036 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5038 static void
5039 pad_to_arg_alignment (offset_ptr, boundary)
5040 struct args_size *offset_ptr;
5041 int boundary;
5043 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5045 if (boundary > BITS_PER_UNIT)
5047 if (offset_ptr->var)
5049 offset_ptr->var =
5050 #ifdef ARGS_GROW_DOWNWARD
5051 round_down
5052 #else
5053 round_up
5054 #endif
5055 (ARGS_SIZE_TREE (*offset_ptr),
5056 boundary / BITS_PER_UNIT);
5057 offset_ptr->constant = 0; /*?*/
5059 else
5060 offset_ptr->constant =
5061 #ifdef ARGS_GROW_DOWNWARD
5062 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5063 #else
5064 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5065 #endif
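/* For instance (values assumed): boundary == 64 gives a
   boundary_in_bytes of 8, so a constant offset of 20 becomes
   CEIL_ROUND (20, 8) == 24 when args grow upward, or
   FLOOR_ROUND (20, 8) == 16 when they grow downward. */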
5069 #ifndef ARGS_GROW_DOWNWARD
5070 static void
5071 pad_below (offset_ptr, passed_mode, sizetree)
5072 struct args_size *offset_ptr;
5073 enum machine_mode passed_mode;
5074 tree sizetree;
5076 if (passed_mode != BLKmode)
5078 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5079 offset_ptr->constant
5080 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5081 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5082 - GET_MODE_SIZE (passed_mode));
5084 else
5086 if (TREE_CODE (sizetree) != INTEGER_CST
5087 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5089 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5090 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5091 /* Add it in. */
5092 ADD_PARM_SIZE (*offset_ptr, s2);
5093 SUB_PARM_SIZE (*offset_ptr, sizetree);
5097 #endif
5099 #ifdef ARGS_GROW_DOWNWARD
5100 static tree
5101 round_down (value, divisor)
5102 tree value;
5103 int divisor;
5105 return size_binop (MULT_EXPR,
5106 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5107 size_int (divisor));
5109 #endif
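/* Sketch only: round_down applied to a size of 23 with divisor 8
   builds the tree for (23 / 8) * 8 == 16; FLOOR_DIV_EXPR guarantees
   the result never exceeds the original value. */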
5111 /* Walk the tree of blocks describing the binding levels within a function
5112 and warn about uninitialized variables.
5113 This is done after calling flow_analysis and before global_alloc
5114 clobbers the pseudo-regs to hard regs. */
5116 void
5117 uninitialized_vars_warning (block)
5118 tree block;
5120 register tree decl, sub;
5121 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5123 if (TREE_CODE (decl) == VAR_DECL
5124 /* These warnings are unreliable for aggregates
5125 because assigning the fields one by one can fail to convince
5126 flow.c that the entire aggregate was initialized.
5127 Unions are troublesome because members may be shorter. */
5128 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5129 && DECL_RTL (decl) != 0
5130 && GET_CODE (DECL_RTL (decl)) == REG
5131 /* Global optimizations can make it difficult to determine if a
5132 particular variable has been initialized. However, a VAR_DECL
5133 with a nonzero DECL_INITIAL had an initializer, so do not
5134 claim it is potentially uninitialized.
5136 We do not care about the actual value in DECL_INITIAL, so we do
5137 not worry that it may be a dangling pointer. */
5138 && DECL_INITIAL (decl) == NULL_TREE
5139 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5140 warning_with_decl (decl,
5141 "`%s' might be used uninitialized in this function");
5142 if (TREE_CODE (decl) == VAR_DECL
5143 && DECL_RTL (decl) != 0
5144 && GET_CODE (DECL_RTL (decl)) == REG
5145 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5146 warning_with_decl (decl,
5147 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5149 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5150 uninitialized_vars_warning (sub);
5153 /* Do the appropriate part of uninitialized_vars_warning
5154 but for arguments instead of local variables. */
5156 void
5157 setjmp_args_warning ()
5159 register tree decl;
5160 for (decl = DECL_ARGUMENTS (current_function_decl);
5161 decl; decl = TREE_CHAIN (decl))
5162 if (DECL_RTL (decl) != 0
5163 && GET_CODE (DECL_RTL (decl)) == REG
5164 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5165 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5168 /* If this function calls setjmp, put all vars into the stack
5169 unless they were declared `register'. */
5171 void
5172 setjmp_protect (block)
5173 tree block;
5175 register tree decl, sub;
5176 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5177 if ((TREE_CODE (decl) == VAR_DECL
5178 || TREE_CODE (decl) == PARM_DECL)
5179 && DECL_RTL (decl) != 0
5180 && (GET_CODE (DECL_RTL (decl)) == REG
5181 || (GET_CODE (DECL_RTL (decl)) == MEM
5182 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5183 /* If this variable came from an inline function, it must be
5184 that its life doesn't overlap the setjmp. If there was a
5185 setjmp in the function, it would already be in memory. We
5186 must exclude such variables because their DECL_RTL might be
5187 set to strange things such as virtual_stack_vars_rtx. */
5188 && ! DECL_FROM_INLINE (decl)
5189 && (
5190 #ifdef NON_SAVING_SETJMP
5191 /* If longjmp doesn't restore the registers,
5192 don't put anything in them. */
5193 NON_SAVING_SETJMP
5195 #endif
5196 ! DECL_REGISTER (decl)))
5197 put_var_into_stack (decl);
5198 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5199 setjmp_protect (sub);
5202 /* Like the previous function, but for args instead of local variables. */
5204 void
5205 setjmp_protect_args ()
5207 register tree decl;
5208 for (decl = DECL_ARGUMENTS (current_function_decl);
5209 decl; decl = TREE_CHAIN (decl))
5210 if ((TREE_CODE (decl) == VAR_DECL
5211 || TREE_CODE (decl) == PARM_DECL)
5212 && DECL_RTL (decl) != 0
5213 && (GET_CODE (DECL_RTL (decl)) == REG
5214 || (GET_CODE (DECL_RTL (decl)) == MEM
5215 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5216 && (
5217 /* If longjmp doesn't restore the registers,
5218 don't put anything in them. */
5219 #ifdef NON_SAVING_SETJMP
5220 NON_SAVING_SETJMP
5222 #endif
5223 ! DECL_REGISTER (decl)))
5224 put_var_into_stack (decl);
5227 /* Return the context-pointer register corresponding to DECL,
5228 or 0 if it does not need one. */
5231 lookup_static_chain (decl)
5232 tree decl;
5234 tree context = decl_function_context (decl);
5235 tree link;
5237 if (context == 0
5238 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5239 return 0;
5241 /* We treat inline_function_decl as an alias for the current function
5242 because that is the inline function whose vars, types, etc.
5243 are being merged into the current function.
5244 See expand_inline_function. */
5245 if (context == current_function_decl || context == inline_function_decl)
5246 return virtual_stack_vars_rtx;
5248 for (link = context_display; link; link = TREE_CHAIN (link))
5249 if (TREE_PURPOSE (link) == context)
5250 return RTL_EXPR_RTL (TREE_VALUE (link));
5252 abort ();
/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).  */

rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  HOST_WIDE_INT displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
    addr = XEXP (XEXP (addr, 0), 0);

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;

      if (fp->arg_pointer_save_area == 0)
	fp->arg_pointer_save_area
	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
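/* A minimal usage sketch of fix_lexical_addr, kept disabled since `var'
   and `slot_addr' are placeholders, not names defined in this file:
   given VAR declared in a containing function and the address SLOT_ADDR
   of its stack slot in that function's frame, obtain a memory reference
   usable in the current function.  */
#if 0
  {
    rtx addr = fix_lexical_addr (slot_addr, var);
    rtx mem = gen_rtx_MEM (DECL_MODE (var),
			   memory_address (DECL_MODE (var), addr));
  }
#endif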
/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return
	round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));

  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl
      && fn_context != inline_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE \
  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      push_obstacks (fp->function_maybepermanent_obstack,
		     fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
/* Given a trampoline address,
   round it to multiple of TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
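/* The two expand_binop calls above emit the usual add-and-mask idiom
   (addr + align-1) & -align.  A self-contained sketch of the same
   arithmetic on a plain integer address, assuming ALIGN is a power of
   two (the function name is illustrative, not part of this file):  */
#if 0
static unsigned long
round_addr_up_example (addr, align)
     unsigned long addr, align;
{
  /* Adding ALIGN-1 then clearing the low bits rounds up; this is also
     why TRAMPOLINE_REAL_SIZE above pads the slot by ALIGN-1 bytes, so
     TRAMPOLINE_SIZE bytes remain after rounding.  */
  return (addr + align - 1) & ~(align - 1);
}
#endif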
/* The functions identify_blocks and reorder_blocks provide a way to
   reorder the tree of BLOCK nodes, for optimizers that reshuffle or
   duplicate portions of the RTL code.  Call identify_blocks before
   changing the RTL, and call reorder_blocks after.  */
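/* A minimal usage sketch of that protocol, kept disabled since
   `rearrange_insns' is a placeholder for a hypothetical pass body:  */
#if 0
  {
    tree *block_map = identify_blocks (block, insns);
    rearrange_insns (insns);	/* reshuffle/duplicate RTL here */
    block = reorder_blocks (block_map, block, insns);
    if (block_map)
      free (block_map);		/* identify_blocks used xmalloc */
  }
#endif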
/* Put all this function's BLOCK nodes including those that are chained
   onto the first block into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are BLOCK, the chain of top-level blocks of the function,
   and INSNS, the insn chain of the function.  */

tree *
identify_blocks (block, insns)
     tree block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  int *block_stack;
  int depth = 0;
  int next_block_number = 1;
  int current_block_number = 1;
  rtx insn;

  if (block == 0)
    return 0;

  n_blocks = all_blocks (block, 0);
  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
  block_stack = (int *) alloca (n_blocks * sizeof (int));

  all_blocks (block, block_vector);

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    block_stack[depth++] = current_block_number;
	    current_block_number = next_block_number;
	    NOTE_BLOCK_NUMBER (insn) = next_block_number++;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    NOTE_BLOCK_NUMBER (insn) = current_block_number;
	    current_block_number = block_stack[--depth];
	  }
      }

  if (n_blocks != next_block_number)
    abort ();

  return block_vector;
}
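/* The numbering loop above is a standard bracket-matching walk: an
   explicit stack remembers the enclosing block while BLOCK_BEG notes
   open new ones.  A self-contained sketch of the same numbering over a
   string of balanced '(' and ')' (illustrative only; assumes nesting
   depth below 64):  */
#if 0
static void
number_regions_example (s, out)
     const char *s;
     int *out;			/* out[i] = region number of char i */
{
  int stack[64], depth = 0, next = 1, cur = 0;
  int i;
  for (i = 0; s[i]; i++)
    {
      if (s[i] == '(')
	stack[depth++] = cur, cur = next++;	/* open: number it */
      out[i] = cur;
      if (s[i] == ')')
	cur = stack[--depth];			/* close: pop */
    }
}
#endif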
/* Given BLOCK_VECTOR which was returned by identify_blocks,
   and a revised instruction chain, rebuild the tree structure
   of BLOCK nodes to correspond to the new order of RTL.
   The new block tree is inserted below TOP_BLOCK.
   Returns the current top-level block.  */

tree
reorder_blocks (block_vector, block, insns)
     tree *block_vector;
     tree block;
     rtx insns;
{
  tree current_block = block;
  rtx insn;

  if (block_vector == 0)
    return block;

  /* Prune the old tree away, so that it doesn't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;
  BLOCK_CHAIN (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
	    /* If we have seen this block before, copy it.  */
	    if (TREE_ASM_WRITTEN (block))
	      block = copy_node (block);
	    BLOCK_SUBBLOCKS (block) = 0;
	    TREE_ASM_WRITTEN (block) = 1;
	    BLOCK_SUPERCONTEXT (block) = current_block;
	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	    BLOCK_SUBBLOCKS (current_block) = block;
	    current_block = block;
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    BLOCK_SUBBLOCKS (current_block)
	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	    current_block = BLOCK_SUPERCONTEXT (current_block);
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
      }

  BLOCK_SUBBLOCKS (current_block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
  return current_block;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
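/* blocks_nreverse is the usual in-place reversal of a singly linked
   list.  The same three-pointer walk on a generic node type, as a
   self-contained sketch (the `node' type is illustrative only):  */
#if 0
struct node { struct node *next; };

static struct node *
nreverse_example (t)
     struct node *t;
{
  struct node *prev = 0, *cur, *next;
  for (cur = t; cur; cur = next)
    {
      next = cur->next;		/* save the rest of the chain */
      cur->next = prev;		/* point this node backwards */
      prev = cur;		/* grow the reversed prefix */
    }
  return prev;			/* old last element, new head */
}
#endif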
/* Count the subblocks of the list starting with BLOCK, and list them
   all into the vector VECTOR.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slots = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;
  nonlocal_goto_handler_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrements and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, 2);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;
  current_function_contains_functions = 0;
  current_function_sp_is_unchanging = 0;
  current_function_has_computed_jump = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;
  current_function_cannot_inline = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */

  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file.  */
  if (line > 0)
    emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;
}
/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}

/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
		     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}
extern struct obstack permanent_obstack;

/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr = NULL_RTX;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  /* If the function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
	  || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
			       AGGREGATE_TYPE_P (TREE_TYPE
						 (DECL_RESULT
						  (subr))));
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
	 compute the return values into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation, copy the static chain
	 pointer into a pseudo.  If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG.  If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
					       memory_address (Pmode,
							       last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
		      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
	emit_label (label);
      }
  }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a temporary
	 pseudo.  */
      current_function_return_rtx = real_decl_result;
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}

/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}
#endif /* HAVE_prologue || HAVE_epilogue */
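/* record_insns builds a 0-terminated vector of UIDs, and contains scans
   it linearly, relying on UID 0 never being a real insn UID so it can
   double as the sentinel.  A self-contained sketch of the same
   membership count on plain ints (names here are illustrative only):  */
#if 0
static int
count_in_vec_example (ids, n_ids, vec)
     int *ids;			/* the UIDs belonging to one insn */
     int n_ids;
     int *vec;			/* 0-terminated vector from record time */
{
  int i, j, count = 0;
  for (i = 0; i < n_ids; i++)
    for (j = 0; vec[j]; j++)	/* stop at the 0 sentinel */
      if (ids[i] == vec[j])
	count++;
  return count;
}
#endif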
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
  int inserted = 0;

  prologue = 0;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
	 the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
	{
	  /* Can't deal with multiple successors of the entry block.  */
	  if (ENTRY_BLOCK_PTR->succ->succ_next)
	    abort ();

	  insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
	  inserted = 1;
	}
      else
	emit_insn_after (seq, f);
    }
#endif

  epilogue = 0;
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
	 but via mark_regs_live_at_end, we could use insert_insn_on_edge
	 and all of this ugliness would go away.  */

      switch (optimize)
	{
	default:
	  /* If the exit block has no non-fake predecessors, we don't
	     need an epilogue.  Furthermore, only pay attention to the
	     fallthru predecessors; if (conditional) return insns were
	     generated, by definition we do not need to emit epilogue
	     insns.  */

	  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	    if ((e->flags & EDGE_FAKE) == 0
		&& (e->flags & EDGE_FALLTHRU) != 0)
	      break;
	  if (e == NULL)
	    break;

	  /* We can't handle multiple epilogues -- if one is needed,
	     we won't be able to place it multiple times.

	     ??? Fix epilogue expanders to not assume they are the
	     last thing done compiling the function.  Either that
	     or copy_rtx each insn.

	     ??? Blah, it's not a simple expression to assert that
	     we've exactly one fallthru exit edge.  */

	  bb = e->src;
	  tail = bb->end;

	  /* ??? If the last insn of the basic block is a jump, then we
	     are creating a new basic block.  Wimp out and leave these
	     insns outside any block.  */
	  if (GET_CODE (tail) == JUMP_INSN)
	    bb = 0;

	  /* FALLTHRU */
	case 0:
	  {
	    rtx prev, seq, first_use;

	    /* Move the USE insns at the end of a function onto a list.  */
	    prev = tail;
	    if (GET_CODE (prev) == BARRIER
		|| GET_CODE (prev) == NOTE)
	      prev = prev_nonnote_insn (prev);

	    first_use = 0;
	    if (prev
		&& GET_CODE (prev) == INSN
		&& GET_CODE (PATTERN (prev)) == USE)
	      {
		/* If the end of the block is the use, grab hold of something
		   else so that we emit barriers etc in the right place.  */
		if (prev == tail)
		  {
		    do
		      tail = PREV_INSN (tail);
		    while (GET_CODE (tail) == INSN
			   && GET_CODE (PATTERN (tail)) == USE);
		  }

		do
		  {
		    rtx use = prev;
		    prev = prev_nonnote_insn (prev);

		    remove_insn (use);
		    if (first_use)
		      {
			NEXT_INSN (use) = first_use;
			PREV_INSN (first_use) = use;
		      }
		    else
		      NEXT_INSN (use) = NULL_RTX;
		    first_use = use;
		  }
		while (prev
		       && GET_CODE (prev) == INSN
		       && GET_CODE (PATTERN (prev)) == USE);
	      }

	    /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	       epilogue insns, the USE insns at the end of a function,
	       the jump insn that returns, and then a BARRIER.  */

	    if (GET_CODE (tail) != BARRIER)
	      {
		prev = next_nonnote_insn (tail);
		if (!prev || GET_CODE (prev) != BARRIER)
		  emit_barrier_after (tail);
	      }

	    seq = gen_epilogue ();
	    prev = tail;
	    tail = emit_jump_insn_after (seq, tail);

	    /* Insert the USE insns immediately before the return insn, which
	       must be the last instruction emitted in the sequence.  */
	    if (first_use)
	      emit_insns_before (first_use, tail);
	    emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

	    /* Update the tail of the basic block.  */
	    if (bb)
	      bb->end = tail;

	    /* Retain a map of the epilogue insns.  */
	    epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  }
	}
    }
#endif

  if (inserted)
    commit_edge_insertions ();
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  rtx next;
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  next = NEXT_INSN (note);

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (BLOCK_HEAD (0) == note)
		    BLOCK_HEAD (0) = next;

		  remove_insn (note);
		  add_insn_after (note, insn);
		}
	    }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (n_basic_blocks
		      && BLOCK_HEAD (n_basic_blocks-1) == insn)
		    BLOCK_HEAD (n_basic_blocks-1) = note;

		  remove_insn (note);
		  add_insn_before (note, insn);
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}