/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

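/* For example, with ALIGN == 8: FLOOR_ROUND (13, 8) == 8,
   CEIL_ROUND (13, 8) == 16, and FLOOR_ROUND (-13, 8) == -16.
   The mask arithmetic rounds toward minus infinity even for
   negative values, which `/' and `*' would not guarantee.  */
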
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if the function being compiled has the address of its
   labels taken.  */

int current_function_addresses_labels;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

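/* For example (an illustrative sketch, with generic names), in GNU C

     s = ({ struct S tmp = make_s (); tmp; });

   may leave the statement expression's value in a stack temporary
   allocated one level down; preserve_temp_slots, below, keeps that
   slot alive by pretending it belongs to the enclosing level until
   the assignment has copied the value out.  */
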
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

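/* A sketch of why matching matters: given a two-address insn such as
   (set (reg 100) (plus (reg 100) (mem X))), if (reg 100) is a fixed-up
   reference to VAR, both occurrences must be rewritten with the same
   rtx from this list, or the operands would no longer match.  */
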
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int));

/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->addresses_labels = current_function_addresses_labels;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);

  init_emit ();
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_addresses_labels = p->addresses_labels;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}

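/* For example, put_reg_into_stack below allocates the backing slot for
   a pseudo with

     new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);

   and, before virtual register instantiation, the resulting MEM's
   address typically has the form
   (plus virtual_stack_vars_rtx (const_int N)).  */
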
/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */

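/* For instance, a call such as assign_stack_temp (BLKmode, 32, 1)
   requests a 32-byte slot that free_temp_slots will leave alone,
   while KEEP == 0 temporaries die at the end of the statement that
   allocated them.  */
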
rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}

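/* As a sketch of the splitting above: reusing a free 64-byte BLKmode
   slot for a 16-byte request (with an 8-byte BIGGEST_ALIGNMENT, say)
   trims the old slot to 16 bytes and threads a new 48-byte temp_slot
   at base_offset + 16 onto the list, so the tail is not wasted.  */
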
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

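/* For example, two free BLKmode slots whose (base_offset, full_size)
   are (32, 16) and (48, 8) satisfy 32 + 16 == 48, so the second is
   merged into the first below, leaving one free slot of full_size 24
   at offset 32.  */
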
void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since such a temporary can
   be reused while generating the same RTL_EXPR, but this is complex
   and probably not worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

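/* A sketch of the usual calling pattern around statement expansion
   (the details vary by caller):

     push_temp_slots ();
     ... expand the statement or expression ...
     preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();

   so temporaries normally die at the end of each statement unless the
   result itself lives in one.  */
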
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl)
                            || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}

static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}

/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.
             Similarly if this is storing VAR from a register from which
             it was loaded in the previous insn.  This will occur
             when an ADDRESSOF was made for an arglist slot.  */
          else if (toplevel
                   && (set = single_set (insn)) != 0
                   && SET_DEST (set) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && (rtx_equal_p (SET_SRC (set), var)
                       || (GET_CODE (SET_SRC (set)) == REG
                           && (prev = prev_nonnote_insn (insn)) != 0
                           && (prev_set = single_set (prev)) != 0
                           && SET_DEST (prev_set) == SET_SRC (set)
                           && rtx_equal_p (SET_SRC (prev_set), var))))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

              if (SMALL_REGISTER_CLASSES)
                {
                  /* If the insn that copies the results of a CALL_INSN
                     into a pseudo now references VAR, we have to use an
                     intermediate pseudo since we want the life of the
                     return value register to be only a single insn.

                     If we don't use an intermediate pseudo, such things as
                     address computations to make the address of VAR valid
                     if it is not can be placed between the CALL_INSN and INSN.

                     To make sure this doesn't happen, we record the destination
                     of the CALL_INSN and see if the next insn uses both that
                     and VAR.  */

                  if (call_dest != 0 && GET_CODE (insn) == INSN
                      && reg_mentioned_p (var, PATTERN (insn))
                      && reg_mentioned_p (call_dest, PATTERN (insn)))
                    {
                      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                      emit_insn_before (gen_move_insn (temp, call_dest), insn);

                      PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                    call_dest, temp);
                    }

                  if (GET_CODE (insn) == CALL_INSN
                      && GET_CODE (PATTERN (insn)) == SET)
                    call_dest = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (insn) == CALL_INSN
                           && GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    call_dest = 0;
                }

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}

1842 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1843 See if the rtx expression at *LOC in INSN needs to be changed.
1845 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1846 contain a list of original rtx's and replacements. If we find that we need
1847 to modify this insn by replacing a memory reference with a pseudo or by
1848 making a new MEM to implement a SUBREG, we consult that list to see if
1849 we have already chosen a replacement. If none has already been allocated,
1850 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1851 or the SUBREG, as appropriate, to the pseudo. */
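/* Concretely (pseudo numbers invented): if an insn computes
   (set (reg:SI 80) (plus:SI VAR VAR)) and its pattern uses a MATCH_DUP,
   both occurrences of VAR must become the same new pseudo, say
   (reg:SI 81).  The list records the pair (VAR, (reg:SI 81)), so the
   second occurrence reuses the first replacement, and our caller then
   emits a single load of (reg:SI 81) from VAR's stack slot.  */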
1853 static void
1854 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1855 register rtx var;
1856 enum machine_mode promoted_mode;
1857 register rtx *loc;
1858 rtx insn;
1859 struct fixup_replacement **replacements;
1861 register int i;
1862 register rtx x = *loc;
1863 RTX_CODE code = GET_CODE (x);
1864 register char *fmt;
1865 register rtx tem, tem1;
1866 struct fixup_replacement *replacement;
1868 switch (code)
1870 case ADDRESSOF:
1871 if (XEXP (x, 0) == var)
1873 /* Prevent sharing of rtl that might lose. */
1874 rtx sub = copy_rtx (XEXP (var, 0));
1876 start_sequence ();
1878 if (! validate_change (insn, loc, sub, 0))
1880 rtx y = force_operand (sub, NULL_RTX);
1882 if (! validate_change (insn, loc, y, 0))
1883 *loc = copy_to_reg (y);
1886 emit_insn_before (gen_sequence (), insn);
1887 end_sequence ();
1889 return;
1891 case MEM:
1892 if (var == x)
1894 /* If we already have a replacement, use it. Otherwise,
1895 try to fix up this address in case it is invalid. */
1897 replacement = find_fixup_replacement (replacements, var);
1898 if (replacement->new)
1900 *loc = replacement->new;
1901 return;
1904 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1906 /* Unless we are forcing memory to register or we changed the mode,
1907 we can leave things the way they are if the insn is valid. */
1909 INSN_CODE (insn) = -1;
1910 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1911 && recog_memoized (insn) >= 0)
1912 return;
1914 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1915 return;
1918 /* If X contains VAR, we need to unshare it here so that we update
1919 each occurrence separately. But all identical MEMs in one insn
1920 must be replaced with the same rtx because of the possibility of
1921 MATCH_DUPs. */
1923 if (reg_mentioned_p (var, x))
1925 replacement = find_fixup_replacement (replacements, x);
1926 if (replacement->new == 0)
1927 replacement->new = copy_most_rtx (x, var);
1929 *loc = x = replacement->new;
1931 break;
1933 case REG:
1934 case CC0:
1935 case PC:
1936 case CONST_INT:
1937 case CONST:
1938 case SYMBOL_REF:
1939 case LABEL_REF:
1940 case CONST_DOUBLE:
1941 return;
1943 case SIGN_EXTRACT:
1944 case ZERO_EXTRACT:
1945 /* Note that in some cases those types of expressions are altered
1946 by optimize_bit_field, and do not survive to get here. */
1947 if (XEXP (x, 0) == var
1948 || (GET_CODE (XEXP (x, 0)) == SUBREG
1949 && SUBREG_REG (XEXP (x, 0)) == var))
1951 /* Get TEM as a valid MEM in the mode presently in the insn.
1953 We don't worry about the possibility of MATCH_DUP here; it
1954 is highly unlikely and would be tricky to handle. */
1956 tem = XEXP (x, 0);
1957 if (GET_CODE (tem) == SUBREG)
1959 if (GET_MODE_BITSIZE (GET_MODE (tem))
1960 > GET_MODE_BITSIZE (GET_MODE (var)))
1962 replacement = find_fixup_replacement (replacements, var);
1963 if (replacement->new == 0)
1964 replacement->new = gen_reg_rtx (GET_MODE (var));
1965 SUBREG_REG (tem) = replacement->new;
1967 else
1968 tem = fixup_memory_subreg (tem, insn, 0);
1970 else
1971 tem = fixup_stack_1 (tem, insn);
1973 /* Unless we want to load from memory, get TEM into the proper mode
1974 for an extract from memory. This can only be done if the
1975 extract is at a constant position and length. */
1977 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1978 && GET_CODE (XEXP (x, 2)) == CONST_INT
1979 && ! mode_dependent_address_p (XEXP (tem, 0))
1980 && ! MEM_VOLATILE_P (tem))
1982 enum machine_mode wanted_mode = VOIDmode;
1983 enum machine_mode is_mode = GET_MODE (tem);
1984 HOST_WIDE_INT width = INTVAL (XEXP (x, 1));
1985 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1987 #ifdef HAVE_extzv
1988 if (GET_CODE (x) == ZERO_EXTRACT)
1989 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1990 #endif
1991 #ifdef HAVE_extv
1992 if (GET_CODE (x) == SIGN_EXTRACT)
1993 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1994 #endif
1995 /* If we have a narrower mode, we can do something. */
1996 if (wanted_mode != VOIDmode
1997 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1999 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2000 rtx old_pos = XEXP (x, 2);
2001 rtx newmem;
2003 /* If the bytes and bits are counted differently, we
2004 must adjust the offset. */
2005 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2006 offset = (GET_MODE_SIZE (is_mode)
2007 - GET_MODE_SIZE (wanted_mode) - offset);
2009 pos %= GET_MODE_BITSIZE (wanted_mode);
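/* Worked example, assuming 8-bit units: an 8-bit field at bit 8 of a
   4-byte IS_MODE, with a 1-byte WANTED_MODE, gives offset = 8/8 = 1
   and pos = 8 % 8 = 0; if bytes and bits are counted from opposite
   ends, the offset instead becomes 4 - 1 - 1 = 2.  */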
2011 newmem = gen_rtx_MEM (wanted_mode,
2012 plus_constant (XEXP (tem, 0), offset));
2013 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2014 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2015 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2017 /* Make the change and see if the insn remains valid. */
2018 INSN_CODE (insn) = -1;
2019 XEXP (x, 0) = newmem;
2020 XEXP (x, 2) = GEN_INT (pos);
2022 if (recog_memoized (insn) >= 0)
2023 return;
2025 /* Otherwise, restore old position. XEXP (x, 0) will be
2026 restored later. */
2027 XEXP (x, 2) = old_pos;
2031 /* If we get here, the bitfield extract insn can't accept a memory
2032 reference. Copy the input into a register. */
2034 tem1 = gen_reg_rtx (GET_MODE (tem));
2035 emit_insn_before (gen_move_insn (tem1, tem), insn);
2036 XEXP (x, 0) = tem1;
2037 return;
2039 break;
2041 case SUBREG:
2042 if (SUBREG_REG (x) == var)
2044 /* If this is a special SUBREG made because VAR was promoted
2045 from a wider mode, replace it with VAR and call ourself
2046 recursively, this time saying that the object previously
2047 had its current mode (by virtue of the SUBREG). */
2049 if (SUBREG_PROMOTED_VAR_P (x))
2051 *loc = var;
2052 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2053 return;
2056 /* If this SUBREG makes VAR wider, it has become a paradoxical
2057 SUBREG with VAR in memory, but these aren't allowed at this
2058 stage of the compilation. So load VAR into a pseudo and take
2059 a SUBREG of that pseudo. */
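/* E.g. (modes chosen for illustration): (subreg:DI VAR 0), with VAR
   now an SImode MEM, becomes (subreg:DI (reg:SI NEW) 0); the copy from
   VAR's stack slot into the NEW pseudo is emitted later by our caller
   from the replacement list.  */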
2060 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2062 replacement = find_fixup_replacement (replacements, var);
2063 if (replacement->new == 0)
2064 replacement->new = gen_reg_rtx (GET_MODE (var));
2065 SUBREG_REG (x) = replacement->new;
2066 return;
2069 /* See if we have already found a replacement for this SUBREG.
2070 If so, use it. Otherwise, make a MEM and see if the insn
2071 is recognized. If not, or if we should force MEM into a register,
2072 make a pseudo for this SUBREG. */
2073 replacement = find_fixup_replacement (replacements, x);
2074 if (replacement->new)
2076 *loc = replacement->new;
2077 return;
2080 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2082 INSN_CODE (insn) = -1;
2083 if (! flag_force_mem && recog_memoized (insn) >= 0)
2084 return;
2086 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2087 return;
2089 break;
2091 case SET:
2092 /* First do special simplification of bit-field references. */
2093 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2094 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2095 optimize_bit_field (x, insn, 0);
2096 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2097 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2098 optimize_bit_field (x, insn, NULL_PTR);
2100 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2101 into a register and then store it back out. */
2102 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2103 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2104 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2105 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2106 > GET_MODE_SIZE (GET_MODE (var))))
2108 replacement = find_fixup_replacement (replacements, var);
2109 if (replacement->new == 0)
2110 replacement->new = gen_reg_rtx (GET_MODE (var));
2112 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2113 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2116 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2117 insn into a pseudo and store the low part of the pseudo into VAR. */
2118 if (GET_CODE (SET_DEST (x)) == SUBREG
2119 && SUBREG_REG (SET_DEST (x)) == var
2120 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2121 > GET_MODE_SIZE (GET_MODE (var))))
2123 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2124 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2125 tem)),
2126 insn);
2127 break;
2131 rtx dest = SET_DEST (x);
2132 rtx src = SET_SRC (x);
2133 rtx outerdest = dest;
2135 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2136 || GET_CODE (dest) == SIGN_EXTRACT
2137 || GET_CODE (dest) == ZERO_EXTRACT)
2138 dest = XEXP (dest, 0);
2140 if (GET_CODE (src) == SUBREG)
2141 src = XEXP (src, 0);
2143 /* If VAR does not appear at the top level of the SET,
2144 just scan the lower levels of the tree. */
2146 if (src != var && dest != var)
2147 break;
2149 /* We will need to rerecognize this insn. */
2150 INSN_CODE (insn) = -1;
2152 #ifdef HAVE_insv
2153 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2155 /* Since this case will return, ensure we fixup all the
2156 operands here. */
2157 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2158 insn, replacements);
2159 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2160 insn, replacements);
2161 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2162 insn, replacements);
2164 tem = XEXP (outerdest, 0);
2166 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2167 that may appear inside a ZERO_EXTRACT.
2168 This was legitimate when the MEM was a REG. */
2169 if (GET_CODE (tem) == SUBREG
2170 && SUBREG_REG (tem) == var)
2171 tem = fixup_memory_subreg (tem, insn, 0);
2172 else
2173 tem = fixup_stack_1 (tem, insn);
2175 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2176 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2177 && ! mode_dependent_address_p (XEXP (tem, 0))
2178 && ! MEM_VOLATILE_P (tem))
2180 enum machine_mode wanted_mode
2181 = insn_operand_mode[(int) CODE_FOR_insv][0];
2182 enum machine_mode is_mode = GET_MODE (tem);
2183 HOST_WIDE_INT width = INTVAL (XEXP (outerdest, 1));
2184 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2186 /* If we have a narrower mode, we can do something. */
2187 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2189 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2190 rtx old_pos = XEXP (outerdest, 2);
2191 rtx newmem;
2193 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2194 offset = (GET_MODE_SIZE (is_mode)
2195 - GET_MODE_SIZE (wanted_mode) - offset);
2197 pos %= GET_MODE_BITSIZE (wanted_mode);
2199 newmem = gen_rtx_MEM (wanted_mode,
2200 plus_constant (XEXP (tem, 0),
2201 offset));
2202 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2203 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2204 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2206 /* Make the change and see if the insn remains valid. */
2207 INSN_CODE (insn) = -1;
2208 XEXP (outerdest, 0) = newmem;
2209 XEXP (outerdest, 2) = GEN_INT (pos);
2211 if (recog_memoized (insn) >= 0)
2212 return;
2214 /* Otherwise, restore old position. XEXP (x, 0) will be
2215 restored later. */
2216 XEXP (outerdest, 2) = old_pos;
2220 /* If we get here, the bit-field store doesn't allow memory
2221 or isn't located at a constant position. Load the value into
2222 a register, do the store, and put it back into memory. */
2224 tem1 = gen_reg_rtx (GET_MODE (tem));
2225 emit_insn_before (gen_move_insn (tem1, tem), insn);
2226 emit_insn_after (gen_move_insn (tem, tem1), insn);
2227 XEXP (outerdest, 0) = tem1;
2228 return;
2230 #endif
2232 /* STRICT_LOW_PART is a no-op on memory references
2233 and it can cause combinations to be unrecognizable,
2234 so eliminate it. */
2236 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2237 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2239 /* A valid insn to copy VAR into or out of a register
2240 must be left alone, to avoid an infinite loop here.
2241 If the reference to VAR is by a subreg, fix that up,
2242 since SUBREG is not valid for a memref.
2243 Also fix up the address of the stack slot.
2245 Note that we must not try to recognize the insn until
2246 after we know that we have valid addresses and no
2247 (subreg (mem ...) ...) constructs, since these interfere
2248 with determining the validity of the insn. */
2250 if ((SET_SRC (x) == var
2251 || (GET_CODE (SET_SRC (x)) == SUBREG
2252 && SUBREG_REG (SET_SRC (x)) == var))
2253 && (GET_CODE (SET_DEST (x)) == REG
2254 || (GET_CODE (SET_DEST (x)) == SUBREG
2255 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2256 && GET_MODE (var) == promoted_mode
2257 && x == single_set (insn))
2259 rtx pat;
2261 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2262 if (replacement->new)
2263 SET_SRC (x) = replacement->new;
2264 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2265 SET_SRC (x) = replacement->new
2266 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2267 else
2268 SET_SRC (x) = replacement->new
2269 = fixup_stack_1 (SET_SRC (x), insn);
2271 if (recog_memoized (insn) >= 0)
2272 return;
2274 /* INSN is not valid, but we know that we want to
2275 copy SET_SRC (x) to SET_DEST (x) in some way. So
2276 we generate the move and see whether it requires more
2277 than one insn. If it does, we emit those insns and
2278 delete INSN. Otherwise, we can just replace the pattern
2279 of INSN; we have already verified above that INSN has
2280 no other function than to do X. */
2282 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2283 if (GET_CODE (pat) == SEQUENCE)
2285 emit_insn_after (pat, insn);
2286 PUT_CODE (insn, NOTE);
2287 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2288 NOTE_SOURCE_FILE (insn) = 0;
2290 else
2291 PATTERN (insn) = pat;
2293 return;
2296 if ((SET_DEST (x) == var
2297 || (GET_CODE (SET_DEST (x)) == SUBREG
2298 && SUBREG_REG (SET_DEST (x)) == var))
2299 && (GET_CODE (SET_SRC (x)) == REG
2300 || (GET_CODE (SET_SRC (x)) == SUBREG
2301 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2302 && GET_MODE (var) == promoted_mode
2303 && x == single_set (insn))
2305 rtx pat;
2307 if (GET_CODE (SET_DEST (x)) == SUBREG)
2308 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2309 else
2310 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2312 if (recog_memoized (insn) >= 0)
2313 return;
2315 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2316 if (GET_CODE (pat) == SEQUENCE)
2318 emit_insn_after (pat, insn);
2319 PUT_CODE (insn, NOTE);
2320 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2321 NOTE_SOURCE_FILE (insn) = 0;
2323 else
2324 PATTERN (insn) = pat;
2326 return;
2329 /* Otherwise, storing into VAR must be handled specially
2330 by storing into a temporary and copying that into VAR
2331 with a new insn after this one. Note that this case
2332 will be used when storing into a promoted scalar since
2333 the insn will now have different modes on the input
2334 and output and hence will be invalid (except for the case
2335 of setting it to a constant, which does not need any
2336 change if it is valid). We generate extra code in that case,
2337 but combine.c will eliminate it. */
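/* Sketch of that rewrite (modes invented; little-endian subreg shown):
   (set VAR SRC), where VAR is an HImode MEM promoted to SImode,
   becomes (set (reg:SI NEW) SRC) followed by
   (set VAR (subreg:HI (reg:SI NEW) 0)) emitted after the insn.  */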
2339 if (dest == var)
2341 rtx temp;
2342 rtx fixeddest = SET_DEST (x);
2344 /* STRICT_LOW_PART can be discarded, around a MEM. */
2345 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2346 fixeddest = XEXP (fixeddest, 0);
2347 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2348 if (GET_CODE (fixeddest) == SUBREG)
2350 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2351 promoted_mode = GET_MODE (fixeddest);
2353 else
2354 fixeddest = fixup_stack_1 (fixeddest, insn);
2356 temp = gen_reg_rtx (promoted_mode);
2358 emit_insn_after (gen_move_insn (fixeddest,
2359 gen_lowpart (GET_MODE (fixeddest),
2360 temp)),
2361 insn);
2363 SET_DEST (x) = temp;
2367 default:
2368 break;
2371 /* Nothing special about this RTX; fix its operands. */
2373 fmt = GET_RTX_FORMAT (code);
2374 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2376 if (fmt[i] == 'e')
2377 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2378 if (fmt[i] == 'E')
2380 register int j;
2381 for (j = 0; j < XVECLEN (x, i); j++)
2382 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2383 insn, replacements);
2388 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2389 return an rtx (MEM:m1 newaddr) which is equivalent.
2390 If any insns must be emitted to compute NEWADDR, put them before INSN.
2392 UNCRITICAL nonzero means accept paradoxical subregs.
2393 This is used for subregs found inside REG_NOTES. */
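/* For example, with 4-byte words: (subreg:HI (mem:SI addr) 0) becomes
   (mem:HI addr) on a little-endian machine, but
   (mem:HI (plus addr (const_int 2))) on a big-endian one, where the
   low-order half of the value lives at the higher address.  */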
2395 static rtx
2396 fixup_memory_subreg (x, insn, uncritical)
2397 rtx x;
2398 rtx insn;
2399 int uncritical;
2401 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2402 rtx addr = XEXP (SUBREG_REG (x), 0);
2403 enum machine_mode mode = GET_MODE (x);
2404 rtx saved, result;
2406 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2407 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2408 && ! uncritical)
2409 abort ();
2411 if (BYTES_BIG_ENDIAN)
2412 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2413 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2414 addr = plus_constant (addr, offset);
2415 if (!flag_force_addr && memory_address_p (mode, addr))
2416 /* Shortcut if no insns need be emitted. */
2417 return change_address (SUBREG_REG (x), mode, addr);
2418 start_sequence ();
2419 result = change_address (SUBREG_REG (x), mode, addr);
2420 emit_insn_before (gen_sequence (), insn);
2421 end_sequence ();
2422 return result;
2425 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2426 Replace subexpressions of X in place.
2427 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2428 Otherwise return X, with its contents possibly altered.
2430 If any insns must be emitted to compute NEWADDR, put them before INSN.
2432 UNCRITICAL is as in fixup_memory_subreg. */
2434 static rtx
2435 walk_fixup_memory_subreg (x, insn, uncritical)
2436 register rtx x;
2437 rtx insn;
2438 int uncritical;
2440 register enum rtx_code code;
2441 register char *fmt;
2442 register int i;
2444 if (x == 0)
2445 return 0;
2447 code = GET_CODE (x);
2449 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2450 return fixup_memory_subreg (x, insn, uncritical);
2452 /* Nothing special about this RTX; fix its operands. */
2454 fmt = GET_RTX_FORMAT (code);
2455 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2457 if (fmt[i] == 'e')
2458 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2459 if (fmt[i] == 'E')
2461 register int j;
2462 for (j = 0; j < XVECLEN (x, i); j++)
2463 XVECEXP (x, i, j)
2464 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2467 return x;
2470 /* For each memory ref within X, if it refers to a stack slot
2471 with an out of range displacement, put the address in a temp register
2472 (emitting new insns before INSN to load these registers)
2473 and alter the memory ref to use that register.
2474 Replace each such MEM rtx with a copy, to avoid clobberage. */
2476 static rtx
2477 fixup_stack_1 (x, insn)
2478 rtx x;
2479 rtx insn;
2481 register int i;
2482 register RTX_CODE code = GET_CODE (x);
2483 register char *fmt;
2485 if (code == MEM)
2487 register rtx ad = XEXP (x, 0);
2488 /* If we have address of a stack slot but it's not valid
2489 (displacement is too large), compute the sum in a register. */
2490 if (GET_CODE (ad) == PLUS
2491 && GET_CODE (XEXP (ad, 0)) == REG
2492 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2493 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2494 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2495 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2496 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2497 #endif
2498 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2499 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2500 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2501 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2503 rtx temp, seq;
2504 if (memory_address_p (GET_MODE (x), ad))
2505 return x;
2507 start_sequence ();
2508 temp = copy_to_reg (ad);
2509 seq = gen_sequence ();
2510 end_sequence ();
2511 emit_insn_before (seq, insn);
2512 return change_address (x, VOIDmode, temp);
2514 return x;
2517 fmt = GET_RTX_FORMAT (code);
2518 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2520 if (fmt[i] == 'e')
2521 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2522 if (fmt[i] == 'E')
2524 register int j;
2525 for (j = 0; j < XVECLEN (x, i); j++)
2526 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2529 return x;
2532 /* Optimization: a bit-field instruction whose field
2533 happens to be a byte or halfword in memory
2534 can be changed to a move instruction.
2536 We call here when INSN is an insn to examine or store into a bit-field.
2537 BODY is the SET-rtx to be altered.
2539 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2540 (Currently this is called only from function.c, and EQUIV_MEM
2541 is always 0.) */
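/* Example of the transformation (address symbolic): an aligned store
   (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8)) SRC)
   touches exactly one byte, and on a little-endian target can become
   the plain move (set (mem:QI (plus addr (const_int 1))) ...); the
   width must match a narrow integer mode and the position must be a
   multiple of the width for this to apply.  */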
2543 static void
2544 optimize_bit_field (body, insn, equiv_mem)
2545 rtx body;
2546 rtx insn;
2547 rtx *equiv_mem;
2549 register rtx bitfield;
2550 int destflag;
2551 rtx seq = 0;
2552 enum machine_mode mode;
2554 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2555 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2556 bitfield = SET_DEST (body), destflag = 1;
2557 else
2558 bitfield = SET_SRC (body), destflag = 0;
2560 /* First check that the field being stored has constant size and position
2561 and is in fact a byte or halfword suitably aligned. */
2563 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2564 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2565 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2566 != BLKmode)
2567 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2569 register rtx memref = 0;
2571 /* Now check that the containing word is memory, not a register,
2572 and that it is safe to change the machine mode. */
2574 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2575 memref = XEXP (bitfield, 0);
2576 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2577 && equiv_mem != 0)
2578 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2579 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2580 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2581 memref = SUBREG_REG (XEXP (bitfield, 0));
2582 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2583 && equiv_mem != 0
2584 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2585 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2587 if (memref
2588 && ! mode_dependent_address_p (XEXP (memref, 0))
2589 && ! MEM_VOLATILE_P (memref))
2591 /* Now adjust the address, first for any subreg'ing
2592 that we are now getting rid of,
2593 and then for which byte of the word is wanted. */
2595 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2596 rtx insns;
2598 /* Adjust OFFSET to count bits from low-address byte. */
2599 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2600 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2601 - offset - INTVAL (XEXP (bitfield, 1)));
2603 /* Adjust OFFSET to count bytes from low-address byte. */
2604 offset /= BITS_PER_UNIT;
2605 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2607 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2608 if (BYTES_BIG_ENDIAN)
2609 offset -= (MIN (UNITS_PER_WORD,
2610 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2611 - MIN (UNITS_PER_WORD,
2612 GET_MODE_SIZE (GET_MODE (memref))));
2615 start_sequence ();
2616 memref = change_address (memref, mode,
2617 plus_constant (XEXP (memref, 0), offset));
2618 insns = get_insns ();
2619 end_sequence ();
2620 emit_insns_before (insns, insn);
2622 /* Store this memory reference where
2623 we found the bit field reference. */
2625 if (destflag)
2627 validate_change (insn, &SET_DEST (body), memref, 1);
2628 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2630 rtx src = SET_SRC (body);
2631 while (GET_CODE (src) == SUBREG
2632 && SUBREG_WORD (src) == 0)
2633 src = SUBREG_REG (src);
2634 if (GET_MODE (src) != GET_MODE (memref))
2635 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2636 validate_change (insn, &SET_SRC (body), src, 1);
2638 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2639 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2640 /* This shouldn't happen because anything that didn't have
2641 one of these modes should have got converted explicitly
2642 and then referenced through a subreg.
2643 This is so because the original bit-field was
2644 handled by agg_mode and so its tree structure had
2645 the same mode that memref now has. */
2646 abort ();
2648 else
2650 rtx dest = SET_DEST (body);
2652 while (GET_CODE (dest) == SUBREG
2653 && SUBREG_WORD (dest) == 0
2654 && (GET_MODE_CLASS (GET_MODE (dest))
2655 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2656 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2657 <= UNITS_PER_WORD))
2658 dest = SUBREG_REG (dest);
2660 validate_change (insn, &SET_DEST (body), dest, 1);
2662 if (GET_MODE (dest) == GET_MODE (memref))
2663 validate_change (insn, &SET_SRC (body), memref, 1);
2664 else
2666 /* Convert the mem ref to the destination mode. */
2667 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2669 start_sequence ();
2670 convert_move (newreg, memref,
2671 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2672 seq = get_insns ();
2673 end_sequence ();
2675 validate_change (insn, &SET_SRC (body), newreg, 1);
2679 /* See if we can convert this extraction or insertion into
2680 a simple move insn. We might not be able to do so if this
2681 was, for example, part of a PARALLEL.
2683 If we succeed, write out any needed conversions. If we fail,
2684 it is hard to guess why we failed, so don't do anything
2685 special; just let the optimization be suppressed. */
2687 if (apply_change_group () && seq)
2688 emit_insns_before (seq, insn);
2693 /* These routines are responsible for converting virtual register references
2694 to the actual hard register references once RTL generation is complete.
2696 The following four variables are used for communication between the
2697 routines. They contain the offsets of the virtual registers from their
2698 respective hard registers. */
2700 static int in_arg_offset;
2701 static int var_offset;
2702 static int dynamic_offset;
2703 static int out_arg_offset;
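/* For instance, a reference to (plus virtual_stack_vars_rtx
   (const_int 4)) is later rewritten as (plus frame_pointer_rtx
   (const_int (var_offset + 4))), and similarly for the other three
   virtual registers and their offsets.  */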
2705 /* In most machines, the stack pointer register is equivalent to the bottom
2706 of the stack. */
2708 #ifndef STACK_POINTER_OFFSET
2709 #define STACK_POINTER_OFFSET 0
2710 #endif
2712 /* If not defined, pick an appropriate default for the offset of dynamically
2713 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2714 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2716 #ifndef STACK_DYNAMIC_OFFSET
2718 #ifdef ACCUMULATE_OUTGOING_ARGS
2719 /* The bottom of the stack points to the actual arguments. If
2720 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2721 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2722 stack space for register parameters is not pushed by the caller, but
2723 rather part of the fixed stack areas and hence not included in
2724 `current_function_outgoing_args_size'. Nevertheless, we must allow
2725 for it when allocating stack dynamic objects. */
2727 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2728 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2729 (current_function_outgoing_args_size \
2730 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2732 #else
2733 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2734 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2735 #endif
2737 #else
2738 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2739 #endif
2740 #endif
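/* As a made-up illustration: with ACCUMULATE_OUTGOING_ARGS, 32 bytes of
   outgoing arguments, 16 bytes of REG_PARM_STACK_SPACE, and a zero
   STACK_POINTER_OFFSET, dynamic allocations would begin 48 bytes above
   the stack pointer.  */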
2742 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2743 its address taken. DECL is the decl for the object stored in the
2744 register, for later use if we do need to force REG into the stack.
2745 REG is overwritten by the MEM like in put_reg_into_stack. */
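/* For instance (pseudo numbers invented): if `int i' lives in
   (reg:SI 64) and its address is taken, that REG rtx is rewritten in
   place into (mem:SI (addressof:SI (reg:SI 70) 64)), so every insn
   sharing the old rtx now sees the MEM; the register is forced into a
   real stack slot only if put_addressof_into_stack is called later.  */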
2747 rtx
2748 gen_mem_addressof (reg, decl)
2749 rtx reg;
2750 tree decl;
2752 tree type = TREE_TYPE (decl);
2753 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2755 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2756 SET_ADDRESSOF_DECL (r, decl);
2758 XEXP (reg, 0) = r;
2759 PUT_CODE (reg, MEM);
2760 PUT_MODE (reg, DECL_MODE (decl));
2761 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2762 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2764 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2765 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2767 return reg;
2770 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2772 void
2773 flush_addressof (decl)
2774 tree decl;
2776 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2777 && DECL_RTL (decl) != 0
2778 && GET_CODE (DECL_RTL (decl)) == MEM
2779 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2780 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2781 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2784 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2786 static void
2787 put_addressof_into_stack (r)
2788 rtx r;
2790 tree decl = ADDRESSOF_DECL (r);
2791 rtx reg = XEXP (r, 0);
2793 if (GET_CODE (reg) != REG)
2794 abort ();
2796 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2797 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2798 ADDRESSOF_REGNO (r),
2799 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2802 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2803 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2804 the stack. IN_DEST is nonzero if we are in the destination of a SET. */
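/* In the common case (modes agreeing; numbers as in the example above),
   (mem:SI (addressof:SI (reg:SI 70) 64)) simply collapses back into
   (reg:SI 70).  Only when no such substitution can be validated, or
   when FORCE is set, is the register pushed into the stack.  */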
2806 static void
2807 purge_addressof_1 (loc, insn, force, in_dest)
2808 rtx *loc;
2809 rtx insn;
2810 int force;
2811 int in_dest;
2813 rtx x;
2814 RTX_CODE code;
2815 int i, j;
2816 char *fmt;
2818 /* Re-start here to avoid recursion in common cases. */
2819 restart:
2821 x = *loc;
2822 if (x == 0)
2823 return;
2825 code = GET_CODE (x);
2827 /* If we don't return in any of the cases below, we will recurse inside
2828 the RTX, which will normally result in any ADDRESSOF being forced into
2829 memory. */
2830 if (code == SET)
2832 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
2833 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
2834 return;
2837 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2839 /* We must create a copy of the rtx because it was created by
2840 overwriting a REG rtx which is always shared. */
2841 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2842 rtx insns;
2844 if (validate_change (insn, loc, sub, 0)
2845 || validate_replace_rtx (x, sub, insn))
2846 return;
2848 start_sequence ();
2849 sub = force_operand (sub, NULL_RTX);
2850 if (! validate_change (insn, loc, sub, 0)
2851 && ! validate_replace_rtx (x, sub, insn))
2852 abort ();
2854 insns = get_insns ();
2855 end_sequence ();
2856 emit_insns_before (insns, insn);
2857 return;
2860 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2862 rtx sub = XEXP (XEXP (x, 0), 0);
2863 rtx sub2;
2865 if (GET_CODE (sub) == MEM)
2867 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2868 MEM_IN_STRUCT_P (sub2) = MEM_IN_STRUCT_P (sub);
2869 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2870 sub = sub2;
2873 else if (GET_CODE (sub) == REG
2874 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2875 ;
2876 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2878 if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
2880 sub2 = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
2881 if (in_dest && GET_MODE_SIZE (GET_MODE (x)) < UNITS_PER_WORD)
2882 sub2 = gen_rtx_STRICT_LOW_PART (GET_MODE (sub2), sub2);
2884 if (validate_change (insn, loc, sub2, 0))
2885 goto restart;
2889 else if (validate_change (insn, loc, sub, 0))
2890 goto restart;
2894 else if (code == ADDRESSOF)
2896 put_addressof_into_stack (x);
2897 return;
2900 /* Scan all subexpressions. */
2901 fmt = GET_RTX_FORMAT (code);
2902 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2904 if (*fmt == 'e')
2905 purge_addressof_1 (&XEXP (x, i), insn, force, in_dest);
2906 else if (*fmt == 'E')
2907 for (j = 0; j < XVECLEN (x, i); j++)
2908 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, in_dest);
2912 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2913 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2914 stack. */
2916 void
2917 purge_addressof (insns)
2918 rtx insns;
2920 rtx insn;
2921 for (insn = insns; insn; insn = NEXT_INSN (insn))
2922 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2923 || GET_CODE (insn) == CALL_INSN)
2925 purge_addressof_1 (&PATTERN (insn), insn,
2926 asm_noperands (PATTERN (insn)) > 0, 0);
2927 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
2931 /* Pass through the INSNS of function FNDECL and convert virtual register
2932 references to hard register references. */
2934 void
2935 instantiate_virtual_regs (fndecl, insns)
2936 tree fndecl;
2937 rtx insns;
2939 rtx insn;
2940 int i;
2942 /* Compute the offsets to use for this function. */
2943 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2944 var_offset = STARTING_FRAME_OFFSET;
2945 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2946 out_arg_offset = STACK_POINTER_OFFSET;
2948 /* Scan all variables and parameters of this function. For each that is
2949 in memory, instantiate all virtual registers if the result is a valid
2950 address. If not, we do it later. That will handle most uses of virtual
2951 regs on many machines. */
2952 instantiate_decls (fndecl, 1);
2954 /* Initialize recognition, indicating that volatile is OK. */
2955 init_recog ();
2957 /* Scan through all the insns, instantiating every virtual register still
2958 present. */
2959 for (insn = insns; insn; insn = NEXT_INSN (insn))
2960 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2961 || GET_CODE (insn) == CALL_INSN)
2963 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2964 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2967 /* Instantiate the stack slots for the parm registers, for later use in
2968 addressof elimination. */
2969 for (i = 0; i < max_parm_reg; ++i)
2970 if (parm_reg_stack_loc[i])
2971 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
2973 /* Now instantiate the remaining register equivalences for debugging info.
2974 These will not be valid addresses. */
2975 instantiate_decls (fndecl, 0);
2977 /* Indicate that, from now on, assign_stack_local should use
2978 frame_pointer_rtx. */
2979 virtuals_instantiated = 1;
2982 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2983 all virtual registers in their DECL_RTL's.
2985 If VALID_ONLY, do this only if the resulting address is still valid.
2986 Otherwise, always do it. */
2988 static void
2989 instantiate_decls (fndecl, valid_only)
2990 tree fndecl;
2991 int valid_only;
2993 tree decl;
2995 if (DECL_SAVED_INSNS (fndecl))
2996 /* When compiling an inline function, the obstack used for
2997 rtl allocation is the maybepermanent_obstack. Calling
2998 `resume_temporary_allocation' switches us back to that
2999 obstack while we process this function's parameters. */
3000 resume_temporary_allocation ();
3002 /* Process all parameters of the function. */
3003 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3005 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3007 instantiate_decl (DECL_RTL (decl), size, valid_only);
3009 /* If the parameter was promoted, then the incoming RTL mode may be
3010 larger than the declared type size. We must use the larger of
3011 the two sizes. */
3012 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3013 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3016 /* Now process all variables defined in the function or its subblocks. */
3017 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3019 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3021 /* Save all rtl allocated for this function by raising the
3022 high-water mark on the maybepermanent_obstack. */
3023 preserve_data ();
3024 /* All further rtl allocation is now done in the current_obstack. */
3025 rtl_in_current_obstack ();
3029 /* Subroutine of instantiate_decls: Process all decls in the given
3030 BLOCK node and all its subblocks. */
3032 static void
3033 instantiate_decls_1 (let, valid_only)
3034 tree let;
3035 int valid_only;
3037 tree t;
3039 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3040 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3041 valid_only);
3043 /* Process all subblocks. */
3044 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3045 instantiate_decls_1 (t, valid_only);
3048 /* Subroutine of the preceding procedures: Given RTL representing a
3049 decl and the size of the object, do any instantiation required.
3051 If VALID_ONLY is non-zero, it means that the RTL should only be
3052 changed if the new address is valid. */
3054 static void
3055 instantiate_decl (x, size, valid_only)
3056 rtx x;
3057 int size;
3058 int valid_only;
3060 enum machine_mode mode;
3061 rtx addr;
3063 /* If this is not a MEM, no need to do anything. Similarly if the
3064 address is a constant or a register that is not a virtual register. */
3066 if (x == 0 || GET_CODE (x) != MEM)
3067 return;
3069 addr = XEXP (x, 0);
3070 if (CONSTANT_P (addr)
3071 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3072 || (GET_CODE (addr) == REG
3073 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3074 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3075 return;
3077 /* If we should only do this if the address is valid, copy the address.
3078 We need to do this so we can undo any changes that might make the
3079 address invalid. This copy is unfortunate, but probably can't be
3080 avoided. */
3082 if (valid_only)
3083 addr = copy_rtx (addr);
3085 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3087 if (valid_only)
3089 /* Now verify that the resulting address is valid for every integer or
3090 floating-point mode up to and including SIZE bytes long. We do this
3091 since the object might be accessed in any mode and frame addresses
3092 are shared. */
3094 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3095 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3096 mode = GET_MODE_WIDER_MODE (mode))
3097 if (! memory_address_p (mode, addr))
3098 return;
3100 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3101 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3102 mode = GET_MODE_WIDER_MODE (mode))
3103 if (! memory_address_p (mode, addr))
3104 return;
3107 /* Put back the address now that we have updated it and we either know
3108 it is valid or we don't care whether it is valid. */
3110 XEXP (x, 0) = addr;
3113 /* Given a pointer to a piece of rtx and an optional pointer to the
3114 containing object, instantiate any virtual registers present in it.
3116 If EXTRA_INSNS, we always do the replacement and generate
3117 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3118 is not valid.
3120 Return 1 if we either had nothing to do or if we were able to do the
3121 needed replacement. Return 0 otherwise; we only return zero if
3122 EXTRA_INSNS is zero.
3124 We first try some simple transformations to avoid the creation of extra
3125 pseudos. */
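/* Typical easy case (offset value invented): with var_offset = -16,
   (plus:SI virtual_stack_vars_rtx (const_int 4)) folds in place to
   (plus:SI frame_pointer_rtx (const_int -12)) with no new insns; only
   when no in-place form is valid do we compute the sum into a fresh
   pseudo emitted before OBJECT.  */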
3127 static int
3128 instantiate_virtual_regs_1 (loc, object, extra_insns)
3129 rtx *loc;
3130 rtx object;
3131 int extra_insns;
3133 rtx x;
3134 RTX_CODE code;
3135 rtx new = 0;
3136 HOST_WIDE_INT offset;
3137 rtx temp;
3138 rtx seq;
3139 int i, j;
3140 char *fmt;
3142 /* Re-start here to avoid recursion in common cases. */
3143 restart:
3145 x = *loc;
3146 if (x == 0)
3147 return 1;
3149 code = GET_CODE (x);
3151 /* Check for some special cases. */
3152 switch (code)
3154 case CONST_INT:
3155 case CONST_DOUBLE:
3156 case CONST:
3157 case SYMBOL_REF:
3158 case CODE_LABEL:
3159 case PC:
3160 case CC0:
3161 case ASM_INPUT:
3162 case ADDR_VEC:
3163 case ADDR_DIFF_VEC:
3164 case RETURN:
3165 return 1;
3167 case SET:
3168 /* We are allowed to set the virtual registers. This means that
3169 the actual register should receive the source minus the
3170 appropriate offset. This is used, for example, in the handling
3171 of non-local gotos. */
3172 if (SET_DEST (x) == virtual_incoming_args_rtx)
3173 new = arg_pointer_rtx, offset = - in_arg_offset;
3174 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3175 new = frame_pointer_rtx, offset = - var_offset;
3176 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3177 new = stack_pointer_rtx, offset = - dynamic_offset;
3178 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3179 new = stack_pointer_rtx, offset = - out_arg_offset;
3181 if (new)
3183 /* The only valid sources here are PLUS or REG. Just do
3184 the simplest possible thing to handle them. */
3185 if (GET_CODE (SET_SRC (x)) != REG
3186 && GET_CODE (SET_SRC (x)) != PLUS)
3187 abort ();
3189 start_sequence ();
3190 if (GET_CODE (SET_SRC (x)) != REG)
3191 temp = force_operand (SET_SRC (x), NULL_RTX);
3192 else
3193 temp = SET_SRC (x);
3194 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3195 seq = get_insns ();
3196 end_sequence ();
3198 emit_insns_before (seq, object);
3199 SET_DEST (x) = new;
3201 if (! validate_change (object, &SET_SRC (x), temp, 0)
3202 || ! extra_insns)
3203 abort ();
3205 return 1;
3208 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3209 loc = &SET_SRC (x);
3210 goto restart;
3212 case PLUS:
3213 /* Handle special case of virtual register plus constant. */
3214 if (CONSTANT_P (XEXP (x, 1)))
3216 rtx old, new_offset;
3218 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3219 if (GET_CODE (XEXP (x, 0)) == PLUS)
3221 rtx inner = XEXP (XEXP (x, 0), 0);
3223 if (inner == virtual_incoming_args_rtx)
3224 new = arg_pointer_rtx, offset = in_arg_offset;
3225 else if (inner == virtual_stack_vars_rtx)
3226 new = frame_pointer_rtx, offset = var_offset;
3227 else if (inner == virtual_stack_dynamic_rtx)
3228 new = stack_pointer_rtx, offset = dynamic_offset;
3229 else if (inner == virtual_outgoing_args_rtx)
3230 new = stack_pointer_rtx, offset = out_arg_offset;
3231 else
3233 loc = &XEXP (x, 0);
3234 goto restart;
3237 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3238 extra_insns);
3239 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3242 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3243 new = arg_pointer_rtx, offset = in_arg_offset;
3244 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3245 new = frame_pointer_rtx, offset = var_offset;
3246 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3247 new = stack_pointer_rtx, offset = dynamic_offset;
3248 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3249 new = stack_pointer_rtx, offset = out_arg_offset;
3250 else
3252 /* We know the second operand is a constant. Unless the
3253 first operand is a REG (which has already been checked),
3254 it needs to be checked. */
3255 if (GET_CODE (XEXP (x, 0)) != REG)
3257 loc = &XEXP (x, 0);
3258 goto restart;
3260 return 1;
3263 new_offset = plus_constant (XEXP (x, 1), offset);
3265 /* If the new constant is zero, try to replace the sum with just
3266 the register. */
3267 if (new_offset == const0_rtx
3268 && validate_change (object, loc, new, 0))
3269 return 1;
3271 /* Next try to replace the register and new offset.
3272 There are two changes to validate here and we can't assume that
3273 when the old offset equals the new one, just changing the register
3274 will yield a valid insn. In the interests of a little efficiency,
3275 however, we only call validate change once (we don't queue up the
3276 changes and then call apply_change_group). */
3278 old = XEXP (x, 0);
3279 if (offset == 0
3280 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3281 : (XEXP (x, 0) = new,
3282 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3284 if (! extra_insns)
3286 XEXP (x, 0) = old;
3287 return 0;
3290 /* Otherwise copy the new constant into a register and replace
3291 constant with that register. */
3292 temp = gen_reg_rtx (Pmode);
3293 XEXP (x, 0) = new;
3294 if (validate_change (object, &XEXP (x, 1), temp, 0))
3295 emit_insn_before (gen_move_insn (temp, new_offset), object);
3296 else
3298 /* If that didn't work, replace this expression with a
3299 register containing the sum. */
3301 XEXP (x, 0) = old;
3302 new = gen_rtx_PLUS (Pmode, new, new_offset);
3304 start_sequence ();
3305 temp = force_operand (new, NULL_RTX);
3306 seq = get_insns ();
3307 end_sequence ();
3309 emit_insns_before (seq, object);
3310 if (! validate_change (object, loc, temp, 0)
3311 && ! validate_replace_rtx (x, temp, object))
3312 abort ();
3316 return 1;
3319 /* Fall through to generic two-operand expression case. */
3320 case EXPR_LIST:
3321 case CALL:
3322 case COMPARE:
3323 case MINUS:
3324 case MULT:
3325 case DIV: case UDIV:
3326 case MOD: case UMOD:
3327 case AND: case IOR: case XOR:
3328 case ROTATERT: case ROTATE:
3329 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3330 case NE: case EQ:
3331 case GE: case GT: case GEU: case GTU:
3332 case LE: case LT: case LEU: case LTU:
3333 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3334 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3335 loc = &XEXP (x, 0);
3336 goto restart;
3338 case MEM:
3339 /* Most cases of MEM that convert to valid addresses have already been
3340 handled by our scan of decls. The only special handling we
3341 need here is to make a copy of the rtx to ensure it isn't being
3342 shared if we have to change it to a pseudo.
3344 If the rtx is a simple reference to an address via a virtual register,
3345 it can potentially be shared. In such cases, first try to make it
3346 a valid address, which can also be shared. Otherwise, copy it and
3347 proceed normally.
3349 First check for common cases that need no processing. These are
3350 usually due to instantiation already being done on a previous instance
3351 of a shared rtx. */
3353 temp = XEXP (x, 0);
3354 if (CONSTANT_ADDRESS_P (temp)
3355 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3356 || temp == arg_pointer_rtx
3357 #endif
3358 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3359 || temp == hard_frame_pointer_rtx
3360 #endif
3361 || temp == frame_pointer_rtx)
3362 return 1;
3364 if (GET_CODE (temp) == PLUS
3365 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3366 && (XEXP (temp, 0) == frame_pointer_rtx
3367 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3368 || XEXP (temp, 0) == hard_frame_pointer_rtx
3369 #endif
3370 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3371 || XEXP (temp, 0) == arg_pointer_rtx
3372 #endif
3373 ))
3374 return 1;
3376 if (temp == virtual_stack_vars_rtx
3377 || temp == virtual_incoming_args_rtx
3378 || (GET_CODE (temp) == PLUS
3379 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3380 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3381 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3383 /* This MEM may be shared. If the substitution can be done without
3384 the need to generate new pseudos, we want to do it in place
3385 so all copies of the shared rtx benefit. The call below will
3386 only make substitutions if the resulting address is still
3387 valid.
3389 Note that we cannot pass X as the object in the recursive call
3390 since the insn being processed may not allow all valid
3391 addresses. However, if we were not passed an object, we can
3392 only modify X without copying it if X will have a valid
3393 address.
3395 ??? Also note that this can still lose if OBJECT is an insn that
3396 has fewer restrictions on an address than some other insn.
3397 In that case, we will modify the shared address. This case
3398 doesn't seem very likely, though. One case where this could
3399 happen is in the case of a USE or CLOBBER reference, but we
3400 take care of that below. */
3402 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3403 object ? object : x, 0))
3404 return 1;
3406 /* Otherwise make a copy and process that copy. We copy the entire
3407 RTL expression since it might be a PLUS which could also be
3408 shared. */
3409 *loc = x = copy_rtx (x);
3412 /* Fall through to generic unary operation case. */
3413 case SUBREG:
3414 case STRICT_LOW_PART:
3415 case NEG: case NOT:
3416 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3417 case SIGN_EXTEND: case ZERO_EXTEND:
3418 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3419 case FLOAT: case FIX:
3420 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3421 case ABS:
3422 case SQRT:
3423 case FFS:
3424 /* These cases either have just one operand or we know that we need not
3425 check the rest of the operands. */
3426 loc = &XEXP (x, 0);
3427 goto restart;
3429 case USE:
3430 case CLOBBER:
3431 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3432 go ahead and make the invalid one, but do it to a copy. For a REG,
3433 just make the recursive call, since there's no chance of a problem. */
3435 if ((GET_CODE (XEXP (x, 0)) == MEM
3436 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3437 0))
3438 || (GET_CODE (XEXP (x, 0)) == REG
3439 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3440 return 1;
3442 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3443 loc = &XEXP (x, 0);
3444 goto restart;
3446 case REG:
3447 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3448 in front of this insn and substitute the temporary. */
3449 if (x == virtual_incoming_args_rtx)
3450 new = arg_pointer_rtx, offset = in_arg_offset;
3451 else if (x == virtual_stack_vars_rtx)
3452 new = frame_pointer_rtx, offset = var_offset;
3453 else if (x == virtual_stack_dynamic_rtx)
3454 new = stack_pointer_rtx, offset = dynamic_offset;
3455 else if (x == virtual_outgoing_args_rtx)
3456 new = stack_pointer_rtx, offset = out_arg_offset;
3458 if (new)
3460 temp = plus_constant (new, offset);
3461 if (!validate_change (object, loc, temp, 0))
3463 if (! extra_insns)
3464 return 0;
3466 start_sequence ();
3467 temp = force_operand (temp, NULL_RTX);
3468 seq = get_insns ();
3469 end_sequence ();
3471 emit_insns_before (seq, object);
3472 if (! validate_change (object, loc, temp, 0)
3473 && ! validate_replace_rtx (x, temp, object))
3474 abort ();
3478 return 1;
3480 case ADDRESSOF:
3481 if (GET_CODE (XEXP (x, 0)) == REG)
3482 return 1;
3484 else if (GET_CODE (XEXP (x, 0)) == MEM)
3486 /* If we have a (addressof (mem ..)), do any instantiation inside
3487 since we know we'll be making the inside valid when we finally
3488 remove the ADDRESSOF. */
3489 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3490 return 1;
3492 break;
3494 default:
3495 break;
3498 /* Scan all subexpressions. */
3499 fmt = GET_RTX_FORMAT (code);
3500 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3501 if (*fmt == 'e')
3503 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3504 return 0;
3506 else if (*fmt == 'E')
3507 for (j = 0; j < XVECLEN (x, i); j++)
3508 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3509 extra_insns))
3510 return 0;
3512 return 1;
3515 /* Optimization: assuming this function does not receive nonlocal gotos,
3516 delete the handlers for such, as well as the insns to establish
3517 and disestablish them. */
3519 static void
3520 delete_handlers ()
3522 rtx insn;
3523 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3525 /* Delete the handler by turning off the flag that would
3526 prevent jump_optimize from deleting it.
3527 Also permit deletion of the nonlocal labels themselves
3528 if nothing local refers to them. */
3529 if (GET_CODE (insn) == CODE_LABEL)
3531 tree t, last_t;
3533 LABEL_PRESERVE_P (insn) = 0;
3535 /* Remove it from the nonlocal_label list, to avoid confusing
3536 flow. */
3537 for (t = nonlocal_labels, last_t = 0; t;
3538 last_t = t, t = TREE_CHAIN (t))
3539 if (DECL_RTL (TREE_VALUE (t)) == insn)
3540 break;
3541 if (t)
3543 if (! last_t)
3544 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3545 else
3546 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3549 if (GET_CODE (insn) == INSN
3550 && ((nonlocal_goto_handler_slot != 0
3551 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3552 || (nonlocal_goto_stack_level != 0
3553 && reg_mentioned_p (nonlocal_goto_stack_level,
3554 PATTERN (insn)))))
3555 delete_insn (insn);
3559 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3560 of the current function. */
3562 rtx
3563 nonlocal_label_rtx_list ()
3565 tree t;
3566 rtx x = 0;
3568 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3569 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3571 return x;
3574 /* Output a USE for any register use in RTL.
3575 This is used with -noreg to mark the extent of lifespan
3576 of any registers used in a user-visible variable's DECL_RTL. */
3578 void
3579 use_variable (rtl)
3580 rtx rtl;
3582 if (GET_CODE (rtl) == REG)
3583 /* This is a register variable. */
3584 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3585 else if (GET_CODE (rtl) == MEM
3586 && GET_CODE (XEXP (rtl, 0)) == REG
3587 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3588 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3589 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3590 /* This is a variable-sized structure. */
3591 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3594 /* Like use_variable except that it outputs the USEs after INSN
3595 instead of at the end of the insn-chain. */
3597 void
3598 use_variable_after (rtl, insn)
3599 rtx rtl, insn;
3601 if (GET_CODE (rtl) == REG)
3602 /* This is a register variable. */
3603 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3604 else if (GET_CODE (rtl) == MEM
3605 && GET_CODE (XEXP (rtl, 0)) == REG
3606 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3607 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3608 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3609 /* This is a variable-sized structure. */
3610 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3614 max_parm_reg_num ()
3616 return max_parm_reg;
3619 /* Return the first insn following those generated by `assign_parms'. */
3622 get_first_nonparm_insn ()
3624 if (last_parm_insn)
3625 return NEXT_INSN (last_parm_insn);
3626 return get_insns ();
3629 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3630 Crash if there is none. */
3633 get_first_block_beg ()
3635 register rtx searcher;
3636 register rtx insn = get_first_nonparm_insn ();
3638 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3639 if (GET_CODE (searcher) == NOTE
3640 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3641 return searcher;
3643 abort (); /* Invalid call to this function. (See comments above.) */
3644 return NULL_RTX;
3647 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3648 This means a type for which function calls must pass an address to the
3649 function or get an address back from the function.
3650 EXP may be a type node or an expression (whose type is tested). */
3653 aggregate_value_p (exp)
3654 tree exp;
3656 int i, regno, nregs;
3657 rtx reg;
3658 tree type;
3659 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3660 type = exp;
3661 else
3662 type = TREE_TYPE (exp);
3664 if (RETURN_IN_MEMORY (type))
3665 return 1;
3666 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3667 and thus can't be returned in registers. */
3668 if (TREE_ADDRESSABLE (type))
3669 return 1;
3670 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3671 return 1;
3672 /* Make sure we have suitable call-clobbered regs to return
3673 the value in; if not, we must return it in memory. */
3674 reg = hard_function_value (type, 0);
3676 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3677 it is OK. */
3678 if (GET_CODE (reg) != REG)
3679 return 0;
3681 regno = REGNO (reg);
3682 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3683 for (i = 0; i < nregs; i++)
3684 if (! call_used_regs[regno + i])
3685 return 1;
3686 return 0;
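/* For example, under -fpcc-struct-return every structure or union value
   is forced into memory, so the function above returns 1 for any
   aggregate; otherwise a small structure can still come back in
   registers, provided hard_function_value yields a REG all of whose
   constituent hard regs are call-clobbered.  */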
3689 /* Assign RTL expressions to the function's parameters.
3690 This may involve copying them into registers and using
3691 those registers as the RTL for them.
3693 If SECOND_TIME is non-zero it means that this function is being
3694 called a second time. This is done by integrate.c when a function's
3695 compilation is deferred. We need to come back here in case the
3696 FUNCTION_ARG macro computes items needed for the rest of the compilation
3697 (such as changing which registers are fixed or caller-saved). But suppress
3698 writing any insns or setting DECL_RTL of anything in this case. */
3700 void
3701 assign_parms (fndecl, second_time)
3702 tree fndecl;
3703 int second_time;
3705 register tree parm;
3706 register rtx entry_parm = 0;
3707 register rtx stack_parm = 0;
3708 CUMULATIVE_ARGS args_so_far;
3709 enum machine_mode promoted_mode, passed_mode;
3710 enum machine_mode nominal_mode, promoted_nominal_mode;
3711 int unsignedp;
3712 /* Total space needed so far for args on the stack,
3713 given as a constant and a tree-expression. */
3714 struct args_size stack_args_size;
3715 tree fntype = TREE_TYPE (fndecl);
3716 tree fnargs = DECL_ARGUMENTS (fndecl);
3717 /* This is used for the arg pointer when referring to stack args. */
3718 rtx internal_arg_pointer;
3719 /* This is a dummy PARM_DECL that we used for the function result if
3720 the function returns a structure. */
3721 tree function_result_decl = 0;
3722 int varargs_setup = 0;
3723 rtx conversion_insns = 0;
3725 /* Nonzero if the last arg is named `__builtin_va_alist',
3726 which is used on some machines for old-fashioned non-ANSI varargs.h;
3727 this should be stuck onto the stack as if it had arrived there. */
3728 int hide_last_arg
3729 = (current_function_varargs
3730 && fnargs
3731 && (parm = tree_last (fnargs)) != 0
3732 && DECL_NAME (parm)
3733 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3734 "__builtin_va_alist")));
3736 /* Nonzero if function takes extra anonymous args.
3737 This means the last named arg must be on the stack
3738 right before the anonymous ones. */
3739 int stdarg
3740 = (TYPE_ARG_TYPES (fntype) != 0
3741 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3742 != void_type_node));
3744 current_function_stdarg = stdarg;
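/* For example, `int f (int a, ...)' has a TYPE_ARG_TYPES list whose last
   element is not void_type_node, so STDARG is nonzero; the fully
   prototyped `int f (int a)' ends in void_type_node, so STDARG is 0.  */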
3746 /* If the reg that the virtual arg pointer will be translated into is
3747 not a fixed reg or is the stack pointer, make a copy of the virtual
3748 arg pointer, and address parms via the copy. The frame pointer is
3749 considered fixed even though it is not marked as such.
3751 The second time through, simply use ap to avoid generating rtx. */
3753 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3754 || ! (fixed_regs[ARG_POINTER_REGNUM]
3755 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3756 && ! second_time)
3757 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3758 else
3759 internal_arg_pointer = virtual_incoming_args_rtx;
3760 current_function_internal_arg_pointer = internal_arg_pointer;
3762 stack_args_size.constant = 0;
3763 stack_args_size.var = 0;
3765 /* If struct value address is treated as the first argument, make it so. */
3766 if (aggregate_value_p (DECL_RESULT (fndecl))
3767 && ! current_function_returns_pcc_struct
3768 && struct_value_incoming_rtx == 0)
3770 tree type = build_pointer_type (TREE_TYPE (fntype));
3772 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3774 DECL_ARG_TYPE (function_result_decl) = type;
3775 TREE_CHAIN (function_result_decl) = fnargs;
3776 fnargs = function_result_decl;
3779 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3780 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3781 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3783 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3784 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3785 #else
3786 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3787 #endif
3789 /* We haven't yet found an argument that we must push and pretend the
3790 caller did. */
3791 current_function_pretend_args_size = 0;
3793 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3795 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3796 struct args_size stack_offset;
3797 struct args_size arg_size;
3798 int passed_pointer = 0;
3799 int did_conversion = 0;
3800 tree passed_type = DECL_ARG_TYPE (parm);
3801 tree nominal_type = TREE_TYPE (parm);
3803 /* Set LAST_NAMED if this is the last named arg before some
3804 anonymous args. */
3805 int last_named = ((TREE_CHAIN (parm) == 0
3806 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3807 && (stdarg || current_function_varargs));
3808 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3809 most machines, if this is a varargs/stdarg function, then we treat
3810 the last named arg as if it were anonymous too. */
3811 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3813 if (TREE_TYPE (parm) == error_mark_node
3814 /* This can happen after weird syntax errors
3815 or if an enum type is defined among the parms. */
3816 || TREE_CODE (parm) != PARM_DECL
3817 || passed_type == NULL)
3819 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3820 = gen_rtx_MEM (BLKmode, const0_rtx);
3821 TREE_USED (parm) = 1;
3822 continue;
3825 /* For a varargs.h function, save info about regs and stack space
3826 used by the individual args, not including the va_alist arg. */
3827 if (hide_last_arg && last_named)
3828 current_function_args_info = args_so_far;
3830 /* Find mode of arg as it is passed, and mode of arg
3831 as it should be during execution of this function. */
3832 passed_mode = TYPE_MODE (passed_type);
3833 nominal_mode = TYPE_MODE (nominal_type);
3835 /* If the parm's mode is VOID, its value doesn't matter;
3836 avoid the usual things like emit_move_insn that could crash. */
3837 if (nominal_mode == VOIDmode)
3839 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3840 continue;
3843 /* If the parm is to be passed as a transparent union, use the
3844 type of the first field for the tests below. We have already
3845 verified that the modes are the same. */
3846 if (DECL_TRANSPARENT_UNION (parm)
3847 || TYPE_TRANSPARENT_UNION (passed_type))
3848 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3850 /* See if this arg was passed by invisible reference. It is if
3851 it is an object whose size depends on the contents of the
3852 object itself or if the machine requires these objects be passed
3853 that way. */
3855 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3856 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3857 || TREE_ADDRESSABLE (passed_type)
3858 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3859 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3860 passed_type, named_arg)
3861 #endif
3864 passed_type = nominal_type = build_pointer_type (passed_type);
3865 passed_pointer = 1;
3866 passed_mode = nominal_mode = Pmode;
3869 promoted_mode = passed_mode;
3871 #ifdef PROMOTE_FUNCTION_ARGS
3872 /* Compute the mode to which the arg is actually extended. */
3873 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3874 #endif
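/* Illustration (assuming a machine whose PROMOTE_MODE widens QImode and
   HImode integers to SImode): a `short' parm then has PASSED_MODE ==
   HImode but PROMOTED_MODE == SImode, and UNSIGNEDP records whether the
   widening was a zero- or a sign-extension.  */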
3876 /* Let machine desc say which reg (if any) the parm arrives in.
3877 0 means it arrives on the stack. */
3878 #ifdef FUNCTION_INCOMING_ARG
3879 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3880 passed_type, named_arg);
3881 #else
3882 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3883 passed_type, named_arg);
3884 #endif
3886 if (entry_parm == 0)
3887 promoted_mode = passed_mode;
3889 #ifdef SETUP_INCOMING_VARARGS
3890 /* If this is the last named parameter, do any required setup for
3891 varargs or stdargs. We need to know about the case of this being an
3892 addressable type, in which case we skip the registers it
3893 would have arrived in.
3895 For stdargs, LAST_NAMED will be set for two parameters, the one that
3896 is actually the last named, and the dummy parameter. We only
3897 want to do this action once.
3899 Also, indicate when RTL generation is to be suppressed. */
3900 if (last_named && !varargs_setup)
3902 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3903 current_function_pretend_args_size,
3904 second_time);
3905 varargs_setup = 1;
3907 #endif
3909 /* Determine parm's home in the stack,
3910 in case it arrives in the stack or we should pretend it did.
3912 Compute the stack position and rtx where the argument arrives
3913 and its size.
3915 There is one complexity here: If this was a parameter that would
3916 have been passed in registers but wasn't, only because it is
3917 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3918 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3919 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3920 0 as it was the previous time. */
3922 locate_and_pad_parm (promoted_mode, passed_type,
3923 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3925 #else
3926 #ifdef FUNCTION_INCOMING_ARG
3927 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3928 passed_type,
3929 (named_arg
3930 || varargs_setup)) != 0,
3931 #else
3932 FUNCTION_ARG (args_so_far, promoted_mode,
3933 passed_type,
3934 named_arg || varargs_setup) != 0,
3935 #endif
3936 #endif
3937 fndecl, &stack_args_size, &stack_offset, &arg_size);
3939 if (! second_time)
3941 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3943 if (offset_rtx == const0_rtx)
3944 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
3945 else
3946 stack_parm = gen_rtx_MEM (promoted_mode,
3947 gen_rtx_PLUS (Pmode,
3948 internal_arg_pointer,
3949 offset_rtx));
3951 /* If this is a memory ref that contains aggregate components,
3952 mark it as such for cse and loop optimize. Likewise if it
3953 is readonly. */
3954 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3955 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3958 /* If this parameter was passed both in registers and in the stack,
3959 use the copy on the stack. */
3960 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3961 entry_parm = 0;
3963 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3964 /* If this parm was passed part in regs and part in memory,
3965 pretend it arrived entirely in memory
3966 by pushing the register-part onto the stack.
3968 In the special case of a DImode or DFmode that is split,
3969 we could put it together in a pseudoreg directly,
3970 but for now that's not worth bothering with. */
3972 if (entry_parm)
3974 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3975 passed_type, named_arg);
3977 if (nregs > 0)
3979 current_function_pretend_args_size
3980 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3981 / (PARM_BOUNDARY / BITS_PER_UNIT)
3982 * (PARM_BOUNDARY / BITS_PER_UNIT));
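/* Illustration (assuming UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64):
   NREGS == 3 gives 12 bytes of register-passed data, which the
   computation above rounds up to 16 bytes of pretended arguments.  */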
3984 if (! second_time)
3986 /* Handle calls that pass values in multiple non-contiguous
3987 locations. The Irix 6 ABI has examples of this. */
3988 if (GET_CODE (entry_parm) == PARALLEL)
3989 emit_group_store (validize_mem (stack_parm), entry_parm);
3990 else
3991 move_block_from_reg (REGNO (entry_parm),
3992 validize_mem (stack_parm), nregs,
3993 int_size_in_bytes (TREE_TYPE (parm)));
3995 entry_parm = stack_parm;
3998 #endif
4000 /* If we didn't decide this parm came in a register,
4001 by default it came on the stack. */
4002 if (entry_parm == 0)
4003 entry_parm = stack_parm;
4005 /* Record permanently how this parm was passed. */
4006 if (! second_time)
4007 DECL_INCOMING_RTL (parm) = entry_parm;
4009 /* If there is actually space on the stack for this parm,
4010 count it in stack_args_size; otherwise set stack_parm to 0
4011 to indicate there is no preallocated stack slot for the parm. */
4013 if (entry_parm == stack_parm
4014 || (GET_CODE (entry_parm) == PARALLEL
4015 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4016 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4017 /* On some machines, even if a parm value arrives in a register
4018 there is still an (uninitialized) stack slot allocated for it.
4020 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4021 whether this parameter already has a stack slot allocated,
4022 because an arg block exists only if current_function_args_size
4023 is larger than some threshold, and we haven't calculated that
4024 yet. So, for now, we just assume that stack slots never exist
4025 in this case. */
4026 || REG_PARM_STACK_SPACE (fndecl) > 0
4027 #endif
4030 stack_args_size.constant += arg_size.constant;
4031 if (arg_size.var)
4032 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4034 else
4035 /* No stack slot was pushed for this parm. */
4036 stack_parm = 0;
4038 /* Update info on where next arg arrives in registers. */
4040 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4041 passed_type, named_arg);
4043 /* If this is our second time through, we are done with this parm. */
4044 if (second_time)
4045 continue;
4047 /* If we can't trust the parm stack slot to be aligned enough
4048 for its ultimate type, don't use that slot after entry.
4049 We'll make another stack slot, if we need one. */
4051 int thisparm_boundary
4052 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4054 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4055 stack_parm = 0;
4058 /* If parm was passed in memory, and we need to convert it on entry,
4059 don't store it back in that same slot. */
4060 if (entry_parm != 0
4061 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4062 stack_parm = 0;
4064 #if 0
4065 /* Now adjust STACK_PARM to the mode and precise location
4066 where this parameter should live during execution,
4067 if we discover that it must live in the stack during execution.
4068 To make debuggers happier on big-endian machines, we store
4069 the value in the last bytes of the space available. */
4071 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4072 && stack_parm != 0)
4074 rtx offset_rtx;
4076 if (BYTES_BIG_ENDIAN
4077 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4078 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4079 - GET_MODE_SIZE (nominal_mode));
4081 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4082 if (offset_rtx == const0_rtx)
4083 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4084 else
4085 stack_parm = gen_rtx_MEM (nominal_mode,
4086 gen_rtx_PLUS (Pmode,
4087 internal_arg_pointer,
4088 offset_rtx));
4090 /* If this is a memory ref that contains aggregate components,
4091 mark it as such for cse and loop optimize. */
4092 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4094 #endif /* 0 */
4096 #ifdef STACK_REGS
4097 /* We need this "use" info, because the gcc-register->stack-register
4098 converter in reg-stack.c needs to know which registers are active
4099 at the start of the function call. The actual parameter loading
4100 instructions are not always available by then, since they might
4101 have been optimised away. */
4103 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4104 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4105 #endif
4107 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4108 in the mode in which it arrives.
4109 STACK_PARM is an RTX for a stack slot where the parameter can live
4110 during the function (in case we want to put it there).
4111 STACK_PARM is 0 if no stack slot was pushed for it.
4113 Now output code if necessary to convert ENTRY_PARM to
4114 the type in which this function declares it,
4115 and store that result in an appropriate place,
4116 which may be a pseudo reg, may be STACK_PARM,
4117 or may be a local stack slot if STACK_PARM is 0.
4119 Set DECL_RTL to that place. */
4121 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4123 /* If a BLKmode arrives in registers, copy it to a stack slot.
4124 Handle calls that pass values in multiple non-contiguous
4125 locations. The Irix 6 ABI has examples of this. */
4126 if (GET_CODE (entry_parm) == REG
4127 || GET_CODE (entry_parm) == PARALLEL)
4129 int size_stored
4130 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4131 UNITS_PER_WORD);
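/* E.g., assuming UNITS_PER_WORD == 4, a 10-byte BLKmode parm gets
   SIZE_STORED == CEIL_ROUND (10, 4) == 12, an integral number
   of words.  */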
4133 /* Note that we will be storing an integral number of words.
4134 So we have to be careful to ensure that we allocate an
4135 integral number of words. We do this below in the
4136 assign_stack_local if space was not allocated in the argument
4137 list. If it was, this will not work if PARM_BOUNDARY is not
4138 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4139 if it becomes a problem. */
4141 if (stack_parm == 0)
4143 stack_parm
4144 = assign_stack_local (GET_MODE (entry_parm),
4145 size_stored, 0);
4147 /* If this is a memory ref that contains aggregate
4148 components, mark it as such for cse and loop optimize. */
4149 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4152 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4153 abort ();
4155 if (TREE_READONLY (parm))
4156 RTX_UNCHANGING_P (stack_parm) = 1;
4158 /* Handle calls that pass values in multiple non-contiguous
4159 locations. The Irix 6 ABI has examples of this. */
4160 if (GET_CODE (entry_parm) == PARALLEL)
4161 emit_group_store (validize_mem (stack_parm), entry_parm);
4162 else
4163 move_block_from_reg (REGNO (entry_parm),
4164 validize_mem (stack_parm),
4165 size_stored / UNITS_PER_WORD,
4166 int_size_in_bytes (TREE_TYPE (parm)));
4168 DECL_RTL (parm) = stack_parm;
4170 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4171 && ! DECL_INLINE (fndecl))
4172 /* layout_decl may set this. */
4173 || TREE_ADDRESSABLE (parm)
4174 || TREE_SIDE_EFFECTS (parm)
4175 /* If -ffloat-store specified, don't put explicit
4176 float variables into registers. */
4177 || (flag_float_store
4178 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4179 /* Always assign pseudo to structure return or item passed
4180 by invisible reference. */
4181 || passed_pointer || parm == function_result_decl)
4183 /* Store the parm in a pseudoregister during the function, but we
4184 may need to do it in a wider mode. */
4186 register rtx parmreg;
4187 int regno, regnoi, regnor;
4189 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4191 promoted_nominal_mode
4192 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4194 parmreg = gen_reg_rtx (promoted_nominal_mode);
4195 mark_user_reg (parmreg);
4197 /* If this was an item that we received a pointer to, set DECL_RTL
4198 appropriately. */
4199 if (passed_pointer)
4201 DECL_RTL (parm)
4202 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4203 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4205 else
4206 DECL_RTL (parm) = parmreg;
4208 /* Copy the value into the register. */
4209 if (nominal_mode != passed_mode
4210 || promoted_nominal_mode != promoted_mode)
4212 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4213 mode, by the caller. We now have to convert it to
4214 NOMINAL_MODE, if different. However, PARMREG may be in
4215 a different mode than NOMINAL_MODE if it is being stored
4216 promoted.
4218 If ENTRY_PARM is a hard register, it might be in a register
4219 not valid for operating in its mode (e.g., an odd-numbered
4220 register for a DFmode). In that case, moves are the only
4221 thing valid, so we can't do a convert from there. This
4222 occurs when the calling sequence allows such misaligned
4223 usage.
4225 In addition, the conversion may involve a call, which could
4226 clobber parameters which haven't been copied to pseudo
4227 registers yet. Therefore, we must first copy the parm to
4228 a pseudo reg here, and save the conversion until after all
4229 parameters have been moved. */
4231 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4233 emit_move_insn (tempreg, validize_mem (entry_parm));
4235 push_to_sequence (conversion_insns);
4236 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4238 expand_assignment (parm,
4239 make_tree (nominal_type, tempreg), 0, 0);
4240 conversion_insns = get_insns ();
4241 did_conversion = 1;
4242 end_sequence ();
4244 else
4245 emit_move_insn (parmreg, validize_mem (entry_parm));
4247 /* If we were passed a pointer but the actual value
4248 can safely live in a register, put it in one. */
4249 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4250 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4251 && ! DECL_INLINE (fndecl))
4252 /* layout_decl may set this. */
4253 || TREE_ADDRESSABLE (parm)
4254 || TREE_SIDE_EFFECTS (parm)
4255 /* If -ffloat-store specified, don't put explicit
4256 float variables into registers. */
4257 || (flag_float_store
4258 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4260 /* We can't use nominal_mode, because it will have been set to
4261 Pmode above. We must use the actual mode of the parm. */
4262 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4263 mark_user_reg (parmreg);
4264 emit_move_insn (parmreg, DECL_RTL (parm));
4265 DECL_RTL (parm) = parmreg;
4266 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4267 now the parm. */
4268 stack_parm = 0;
4270 #ifdef FUNCTION_ARG_CALLEE_COPIES
4271 /* If we are passed an arg by reference and it is our responsibility
4272 to make a copy, do it now.
4273 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4274 original argument, so we must recreate them in the call to
4275 FUNCTION_ARG_CALLEE_COPIES. */
4276 /* ??? Later add code to skip the copy when the argument isn't
4277 modified. */
4279 else if (passed_pointer
4280 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4281 TYPE_MODE (DECL_ARG_TYPE (parm)),
4282 DECL_ARG_TYPE (parm),
4283 named_arg)
4284 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4286 rtx copy;
4287 tree type = DECL_ARG_TYPE (parm);
4289 /* This sequence may involve a library call perhaps clobbering
4290 registers that haven't been copied to pseudos yet. */
4292 push_to_sequence (conversion_insns);
4294 if (TYPE_SIZE (type) == 0
4295 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4296 /* This is a variable sized object. */
4297 copy = gen_rtx_MEM (BLKmode,
4298 allocate_dynamic_stack_space
4299 (expr_size (parm), NULL_RTX,
4300 TYPE_ALIGN (type)));
4301 else
4302 copy = assign_stack_temp (TYPE_MODE (type),
4303 int_size_in_bytes (type), 1);
4304 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4305 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4307 store_expr (parm, copy, 0);
4308 emit_move_insn (parmreg, XEXP (copy, 0));
4309 if (current_function_check_memory_usage)
4310 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4311 XEXP (copy, 0), ptr_mode,
4312 GEN_INT (int_size_in_bytes (type)),
4313 TYPE_MODE (sizetype),
4314 GEN_INT (MEMORY_USE_RW),
4315 TYPE_MODE (integer_type_node));
4316 conversion_insns = get_insns ();
4317 did_conversion = 1;
4318 end_sequence ();
4320 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4322 /* In any case, record the parm's desired stack location
4323 in case we later discover it must live in the stack.
4325 If it is a COMPLEX value, store the stack location for both
4326 halves. */
4328 if (GET_CODE (parmreg) == CONCAT)
4329 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4330 else
4331 regno = REGNO (parmreg);
4333 if (regno >= max_parm_reg)
4335 rtx *new;
4336 int old_max_parm_reg = max_parm_reg;
4338 /* It's slow to expand this one register at a time,
4339 but it's also rare and we need max_parm_reg to be
4340 precisely correct. */
4341 max_parm_reg = regno + 1;
4342 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4343 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4344 old_max_parm_reg * sizeof (rtx));
4345 bzero ((char *) (new + old_max_parm_reg),
4346 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4347 parm_reg_stack_loc = new;
4350 if (GET_CODE (parmreg) == CONCAT)
4352 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4354 regnor = REGNO (gen_realpart (submode, parmreg));
4355 regnoi = REGNO (gen_imagpart (submode, parmreg));
4357 if (stack_parm != 0)
4359 parm_reg_stack_loc[regnor]
4360 = gen_realpart (submode, stack_parm);
4361 parm_reg_stack_loc[regnoi]
4362 = gen_imagpart (submode, stack_parm);
4364 else
4366 parm_reg_stack_loc[regnor] = 0;
4367 parm_reg_stack_loc[regnoi] = 0;
4370 else
4371 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4373 /* Mark the register as eliminable if we did no conversion
4374 and it was copied from memory at a fixed offset,
4375 and the arg pointer was not copied to a pseudo-reg.
4376 If the arg pointer is a pseudo reg or the offset formed
4377 an invalid address, such memory-equivalences
4378 as we make here would screw up life analysis for it. */
4379 if (nominal_mode == passed_mode
4380 && ! did_conversion
4381 && stack_parm != 0
4382 && GET_CODE (stack_parm) == MEM
4383 && stack_offset.var == 0
4384 && reg_mentioned_p (virtual_incoming_args_rtx,
4385 XEXP (stack_parm, 0)))
4387 rtx linsn = get_last_insn ();
4388 rtx sinsn, set;
4390 /* Mark complex types separately. */
4391 if (GET_CODE (parmreg) == CONCAT)
4392 /* Scan backwards for the set of the real and
4393 imaginary parts. */
4394 for (sinsn = linsn; sinsn != 0;
4395 sinsn = prev_nonnote_insn (sinsn))
4397 set = single_set (sinsn);
4398 if (set != 0
4399 && SET_DEST (set) == regno_reg_rtx [regnoi])
4400 REG_NOTES (sinsn)
4401 = gen_rtx_EXPR_LIST (REG_EQUIV,
4402 parm_reg_stack_loc[regnoi],
4403 REG_NOTES (sinsn));
4404 else if (set != 0
4405 && SET_DEST (set) == regno_reg_rtx [regnor])
4406 REG_NOTES (sinsn)
4407 = gen_rtx_EXPR_LIST (REG_EQUIV,
4408 parm_reg_stack_loc[regnor],
4409 REG_NOTES (sinsn));
4411 else if ((set = single_set (linsn)) != 0
4412 && SET_DEST (set) == parmreg)
4413 REG_NOTES (linsn)
4414 = gen_rtx_EXPR_LIST (REG_EQUIV,
4415 stack_parm, REG_NOTES (linsn));
4418 /* For a pointer data type, suggest a pointer register. */
4419 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4420 mark_reg_pointer (parmreg,
4421 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4422 / BITS_PER_UNIT));
4424 else
4426 /* Value must be stored in the stack slot STACK_PARM
4427 during function execution. */
4429 if (promoted_mode != nominal_mode)
4431 /* Conversion is required. */
4432 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4434 emit_move_insn (tempreg, validize_mem (entry_parm));
4436 push_to_sequence (conversion_insns);
4437 entry_parm = convert_to_mode (nominal_mode, tempreg,
4438 TREE_UNSIGNED (TREE_TYPE (parm)));
4439 conversion_insns = get_insns ();
4440 did_conversion = 1;
4441 end_sequence ();
4444 if (entry_parm != stack_parm)
4446 if (stack_parm == 0)
4448 stack_parm
4449 = assign_stack_local (GET_MODE (entry_parm),
4450 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4451 /* If this is a memory ref that contains aggregate components,
4452 mark it as such for cse and loop optimize. */
4453 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4456 if (promoted_mode != nominal_mode)
4458 push_to_sequence (conversion_insns);
4459 emit_move_insn (validize_mem (stack_parm),
4460 validize_mem (entry_parm));
4461 conversion_insns = get_insns ();
4462 end_sequence ();
4464 else
4465 emit_move_insn (validize_mem (stack_parm),
4466 validize_mem (entry_parm));
4468 if (current_function_check_memory_usage)
4470 push_to_sequence (conversion_insns);
4471 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4472 XEXP (stack_parm, 0), ptr_mode,
4473 GEN_INT (GET_MODE_SIZE (GET_MODE
4474 (entry_parm))),
4475 TYPE_MODE (sizetype),
4476 GEN_INT (MEMORY_USE_RW),
4477 TYPE_MODE (integer_type_node));
4479 conversion_insns = get_insns ();
4480 end_sequence ();
4482 DECL_RTL (parm) = stack_parm;
4485 /* If this "parameter" was the place where we are receiving the
4486 function's incoming structure pointer, set up the result. */
4487 if (parm == function_result_decl)
4489 tree result = DECL_RESULT (fndecl);
4490 tree restype = TREE_TYPE (result);
4492 DECL_RTL (result)
4493 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4495 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4498 if (TREE_THIS_VOLATILE (parm))
4499 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4500 if (TREE_READONLY (parm))
4501 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4504 /* Output all parameter conversion instructions (possibly including calls)
4505 now that all parameters have been copied out of hard registers. */
4506 emit_insns (conversion_insns);
4508 last_parm_insn = get_last_insn ();
4510 current_function_args_size = stack_args_size.constant;
4512 /* Adjust function incoming argument size for alignment and
4513 minimum length. */
4515 #ifdef REG_PARM_STACK_SPACE
4516 #ifndef MAYBE_REG_PARM_STACK_SPACE
4517 current_function_args_size = MAX (current_function_args_size,
4518 REG_PARM_STACK_SPACE (fndecl));
4519 #endif
4520 #endif
4522 #ifdef STACK_BOUNDARY
4523 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4525 current_function_args_size
4526 = ((current_function_args_size + STACK_BYTES - 1)
4527 / STACK_BYTES) * STACK_BYTES;
4528 #endif
4530 #ifdef ARGS_GROW_DOWNWARD
4531 current_function_arg_offset_rtx
4532 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4533 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4534 size_int (-stack_args_size.constant)),
4535 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4536 #else
4537 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4538 #endif
4540 /* See how many bytes, if any, of its args a function should try to pop
4541 on return. */
4543 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4544 current_function_args_size);
4546 /* For a stdarg.h function, save info about
4547 regs and stack space used by the named args. */
4549 if (!hide_last_arg)
4550 current_function_args_info = args_so_far;
4552 /* Set the rtx used for the function return value. Put this in its
4553 own variable so any optimizers that need this information don't have
4554 to include tree.h. Do this here so it gets done when an inlined
4555 function gets output. */
4557 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4560 /* Indicate whether REGNO is an incoming argument to the current function
4561 that was promoted to a wider mode. If so, return the RTX for the
4562 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4563 that REGNO is promoted from and whether the promotion was signed or
4564 unsigned. */
4566 #ifdef PROMOTE_FUNCTION_ARGS
4569 promoted_input_arg (regno, pmode, punsignedp)
4570 int regno;
4571 enum machine_mode *pmode;
4572 int *punsignedp;
4574 tree arg;
4576 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4577 arg = TREE_CHAIN (arg))
4578 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4579 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4580 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4582 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4583 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4585 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4586 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4587 && mode != DECL_MODE (arg))
4589 *pmode = DECL_MODE (arg);
4590 *punsignedp = unsignedp;
4591 return DECL_INCOMING_RTL (arg);
4595 return 0;
4598 #endif
4600 /* Compute the size and offset from the start of the stacked arguments for a
4601 parm passed in mode PASSED_MODE and with type TYPE.
4603 INITIAL_OFFSET_PTR points to the current offset into the stacked
4604 arguments.
4606 The starting offset and size for this parm are returned in *OFFSET_PTR
4607 and *ARG_SIZE_PTR, respectively.
4609 IN_REGS is non-zero if the argument will be passed in registers. It will
4610 never be set if REG_PARM_STACK_SPACE is not defined.
4612 FNDECL is the function in which the argument was defined.
4614 There are two types of rounding that are done. The first, controlled by
4615 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4616 list to be aligned to the specific boundary (in bits). This rounding
4617 affects the initial and starting offsets, but not the argument size.
4619 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4620 optionally rounds the size of the parm to PARM_BOUNDARY. The
4621 initial offset is not affected by this rounding, while the size always
4622 is and the starting offset may be. */
4624 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4625 initial_offset_ptr is positive because locate_and_pad_parm's
4626 callers pass in the total size of args so far as
4627 initial_offset_ptr. arg_size_ptr is always positive. */
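/* Worked example, assuming PARM_BOUNDARY == 32 and a FUNCTION_ARG_BOUNDARY
   of 64 bits for this parm: an initial offset of 4 bytes is first rounded
   up to 8, which moves the starting offset but leaves the size alone;
   a 6-byte arg padded upward then has its size rounded up to 8, which
   changes the size (and hence where the next parm starts) but not this
   parm's own starting offset.  */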
4629 void
4630 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4631 initial_offset_ptr, offset_ptr, arg_size_ptr)
4632 enum machine_mode passed_mode;
4633 tree type;
4634 int in_regs;
4635 tree fndecl;
4636 struct args_size *initial_offset_ptr;
4637 struct args_size *offset_ptr;
4638 struct args_size *arg_size_ptr;
4640 tree sizetree
4641 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4642 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4643 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4644 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4645 int reg_parm_stack_space = 0;
4647 #ifdef REG_PARM_STACK_SPACE
4648 /* If we have found a stack parm before we reach the end of the
4649 area reserved for registers, skip that area. */
4650 if (! in_regs)
4652 #ifdef MAYBE_REG_PARM_STACK_SPACE
4653 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4654 #else
4655 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4656 #endif
4657 if (reg_parm_stack_space > 0)
4659 if (initial_offset_ptr->var)
4661 initial_offset_ptr->var
4662 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4663 size_int (reg_parm_stack_space));
4664 initial_offset_ptr->constant = 0;
4666 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4667 initial_offset_ptr->constant = reg_parm_stack_space;
4670 #endif /* REG_PARM_STACK_SPACE */
4672 arg_size_ptr->var = 0;
4673 arg_size_ptr->constant = 0;
4675 #ifdef ARGS_GROW_DOWNWARD
4676 if (initial_offset_ptr->var)
4678 offset_ptr->constant = 0;
4679 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4680 initial_offset_ptr->var);
4682 else
4684 offset_ptr->constant = - initial_offset_ptr->constant;
4685 offset_ptr->var = 0;
4687 if (where_pad != none
4688 && (TREE_CODE (sizetree) != INTEGER_CST
4689 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4690 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4691 SUB_PARM_SIZE (*offset_ptr, sizetree);
4692 if (where_pad != downward)
4693 pad_to_arg_alignment (offset_ptr, boundary);
4694 if (initial_offset_ptr->var)
4696 arg_size_ptr->var = size_binop (MINUS_EXPR,
4697 size_binop (MINUS_EXPR,
4698 integer_zero_node,
4699 initial_offset_ptr->var),
4700 offset_ptr->var);
4702 else
4704 arg_size_ptr->constant = (- initial_offset_ptr->constant
4705 - offset_ptr->constant);
4707 #else /* !ARGS_GROW_DOWNWARD */
4708 pad_to_arg_alignment (initial_offset_ptr, boundary);
4709 *offset_ptr = *initial_offset_ptr;
4711 #ifdef PUSH_ROUNDING
4712 if (passed_mode != BLKmode)
4713 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4714 #endif
4716 /* Pad_below needs the pre-rounded size to know how much to pad below,
4717 so this must be done before rounding up. */
4718 if (where_pad == downward
4719 /* However, BLKmode args passed in regs have their padding done elsewhere.
4720 The stack slot must be able to hold the entire register. */
4721 && !(in_regs && passed_mode == BLKmode))
4722 pad_below (offset_ptr, passed_mode, sizetree);
4724 if (where_pad != none
4725 && (TREE_CODE (sizetree) != INTEGER_CST
4726 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4727 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4729 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4730 #endif /* ARGS_GROW_DOWNWARD */
4733 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4734 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4736 static void
4737 pad_to_arg_alignment (offset_ptr, boundary)
4738 struct args_size *offset_ptr;
4739 int boundary;
4741 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4743 if (boundary > BITS_PER_UNIT)
4745 if (offset_ptr->var)
4747 offset_ptr->var =
4748 #ifdef ARGS_GROW_DOWNWARD
4749 round_down
4750 #else
4751 round_up
4752 #endif
4753 (ARGS_SIZE_TREE (*offset_ptr),
4754 boundary / BITS_PER_UNIT);
4755 offset_ptr->constant = 0; /*?*/
4757 else
4758 offset_ptr->constant =
4759 #ifdef ARGS_GROW_DOWNWARD
4760 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4761 #else
4762 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4763 #endif
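/* E.g., with BOUNDARY == 64 a constant offset of 20 becomes
   CEIL_ROUND (20, 8) == 24 when args grow upward; when they grow
   downward, offsets are negative, and -20 becomes
   FLOOR_ROUND (-20, 8) == -24.  */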
4767 static void
4768 pad_below (offset_ptr, passed_mode, sizetree)
4769 struct args_size *offset_ptr;
4770 enum machine_mode passed_mode;
4771 tree sizetree;
4773 if (passed_mode != BLKmode)
4775 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4776 offset_ptr->constant
4777 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4778 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4779 - GET_MODE_SIZE (passed_mode));
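/* E.g., assuming PARM_BOUNDARY == 32, an HImode parm of 16 bits gets
   ((16 + 31) / 32 * 32 / 8) - 2 == 2 bytes of padding below.  */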
4781 else
4783 if (TREE_CODE (sizetree) != INTEGER_CST
4784 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4786 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
4787 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4788 /* Add it in. */
4789 ADD_PARM_SIZE (*offset_ptr, s2);
4790 SUB_PARM_SIZE (*offset_ptr, sizetree);
4795 static tree
4796 round_down (value, divisor)
4797 tree value;
4798 int divisor;
4800 return size_binop (MULT_EXPR,
4801 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4802 size_int (divisor));
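/* E.g., round_down of a 10-byte size with divisor 4 computes
   FLOOR_DIV_EXPR (10, 4) == 2 and returns 2 * 4 == 8.  */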
4805 /* Walk the tree of blocks describing the binding levels within a function
4806 and warn about uninitialized variables.
4807 This is done after calling flow_analysis and before global_alloc
4808 clobbers the pseudo-regs to hard regs. */
4810 void
4811 uninitialized_vars_warning (block)
4812 tree block;
4814 register tree decl, sub;
4815 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4817 if (TREE_CODE (decl) == VAR_DECL
4818 /* These warnings are unreliable for aggregates
4819 because assigning the fields one by one can fail to convince
4820 flow.c that the entire aggregate was initialized.
4821 Unions are troublesome because members may be shorter. */
4822 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4823 && DECL_RTL (decl) != 0
4824 && GET_CODE (DECL_RTL (decl)) == REG
4825 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4826 warning_with_decl (decl,
4827 "`%s' might be used uninitialized in this function");
4828 if (TREE_CODE (decl) == VAR_DECL
4829 && DECL_RTL (decl) != 0
4830 && GET_CODE (DECL_RTL (decl)) == REG
4831 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4832 warning_with_decl (decl,
4833 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4835 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4836 uninitialized_vars_warning (sub);
4839 /* Do the appropriate part of uninitialized_vars_warning
4840 but for arguments instead of local variables. */
4842 void
4843 setjmp_args_warning ()
4845 register tree decl;
4846 for (decl = DECL_ARGUMENTS (current_function_decl);
4847 decl; decl = TREE_CHAIN (decl))
4848 if (DECL_RTL (decl) != 0
4849 && GET_CODE (DECL_RTL (decl)) == REG
4850 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4851 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4854 /* If this function calls setjmp, put all vars into the stack
4855 unless they were declared `register'. */
4857 void
4858 setjmp_protect (block)
4859 tree block;
4861 register tree decl, sub;
4862 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4863 if ((TREE_CODE (decl) == VAR_DECL
4864 || TREE_CODE (decl) == PARM_DECL)
4865 && DECL_RTL (decl) != 0
4866 && (GET_CODE (DECL_RTL (decl)) == REG
4867 || (GET_CODE (DECL_RTL (decl)) == MEM
4868 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4869 /* If this variable came from an inline function, it must be
4870 that its life doesn't overlap the setjmp. If there was a
4871 setjmp in the function, it would already be in memory. We
4872 must exclude such variables because their DECL_RTL might be
4873 set to strange things such as virtual_stack_vars_rtx. */
4874 && ! DECL_FROM_INLINE (decl)
4875 && (
4876 #ifdef NON_SAVING_SETJMP
4877 /* If longjmp doesn't restore the registers,
4878 don't put anything in them. */
4879 NON_SAVING_SETJMP
4881 #endif
4882 ! DECL_REGISTER (decl)))
4883 put_var_into_stack (decl);
4884 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4885 setjmp_protect (sub);
4888 /* Like the previous function, but for args instead of local variables. */
4890 void
4891 setjmp_protect_args ()
4893 register tree decl, sub;
4894 for (decl = DECL_ARGUMENTS (current_function_decl);
4895 decl; decl = TREE_CHAIN (decl))
4896 if ((TREE_CODE (decl) == VAR_DECL
4897 || TREE_CODE (decl) == PARM_DECL)
4898 && DECL_RTL (decl) != 0
4899 && (GET_CODE (DECL_RTL (decl)) == REG
4900 || (GET_CODE (DECL_RTL (decl)) == MEM
4901 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4902 && (
4903 /* If longjmp doesn't restore the registers,
4904 don't put anything in them. */
4905 #ifdef NON_SAVING_SETJMP
4906 NON_SAVING_SETJMP
4908 #endif
4909 ! DECL_REGISTER (decl)))
4910 put_var_into_stack (decl);
4913 /* Return the context-pointer register corresponding to DECL,
4914 or 0 if it does not need one. */
4917 lookup_static_chain (decl)
4918 tree decl;
4920 tree context = decl_function_context (decl);
4921 tree link;
4923 if (context == 0
4924 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4925 return 0;
4927 /* We treat inline_function_decl as an alias for the current function
4928 because that is the inline function whose vars, types, etc.
4929 are being merged into the current function.
4930 See expand_inline_function. */
4931 if (context == current_function_decl || context == inline_function_decl)
4932 return virtual_stack_vars_rtx;
4934 for (link = context_display; link; link = TREE_CHAIN (link))
4935 if (TREE_PURPOSE (link) == context)
4936 return RTL_EXPR_RTL (TREE_VALUE (link));
4938 abort ();
4941 /* Convert a stack slot address ADDR for variable VAR
4942 (from a containing function)
4943 into an address valid in this function (using a static chain). */
4946 fix_lexical_addr (addr, var)
4947 rtx addr;
4948 tree var;
4950 rtx basereg;
4951 HOST_WIDE_INT displacement;
4952 tree context = decl_function_context (var);
4953 struct function *fp;
4954 rtx base = 0;
4956 /* If this is the present function, we need not do anything. */
4957 if (context == current_function_decl || context == inline_function_decl)
4958 return addr;
4960 for (fp = outer_function_chain; fp; fp = fp->next)
4961 if (fp->decl == context)
4962 break;
4964 if (fp == 0)
4965 abort ();
4967 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
4968 addr = XEXP (XEXP (addr, 0), 0);
4970 /* Decode given address as base reg plus displacement. */
4971 if (GET_CODE (addr) == REG)
4972 basereg = addr, displacement = 0;
4973 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4974 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4975 else
4976 abort ();
4978 /* We accept vars reached via the containing function's
4979 incoming arg pointer and via its stack variables pointer. */
4980 if (basereg == fp->internal_arg_pointer)
4982 /* If reached via arg pointer, get the arg pointer value
4983 out of that function's stack frame.
4985 There are two cases: If a separate ap is needed, allocate a
4986 slot in the outer function for it and dereference it that way.
4987 This is correct even if the real ap is actually a pseudo.
4988 Otherwise, just adjust the offset from the frame pointer to
4989 compensate. */
4991 #ifdef NEED_SEPARATE_AP
4992 rtx addr;
4994 if (fp->arg_pointer_save_area == 0)
4995 fp->arg_pointer_save_area
4996 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4998 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4999 addr = memory_address (Pmode, addr);
5001 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5002 #else
5003 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5004 base = lookup_static_chain (var);
5005 #endif
5008 else if (basereg == virtual_stack_vars_rtx)
5010 /* This is the same code as lookup_static_chain, duplicated here to
5011 avoid an extra call to decl_function_context. */
5012 tree link;
5014 for (link = context_display; link; link = TREE_CHAIN (link))
5015 if (TREE_PURPOSE (link) == context)
5017 base = RTL_EXPR_RTL (TREE_VALUE (link));
5018 break;
5022 if (base == 0)
5023 abort ();
5025 /* Use the same offset, relative to the appropriate static chain or
5026 argument pointer. */
5027 return plus_constant (base, displacement);
5030 /* Return the address of the trampoline for entering nested fn FUNCTION.
5031 If necessary, allocate a trampoline (in the stack frame)
5032 and emit rtl to initialize its contents (at entry to this function). */
5035 trampoline_address (function)
5036 tree function;
5038 tree link;
5039 tree rtlexp;
5040 rtx tramp;
5041 struct function *fp;
5042 tree fn_context;
5044 /* Find an existing trampoline and return it. */
5045 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5046 if (TREE_PURPOSE (link) == function)
5047 return
5048 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5050 for (fp = outer_function_chain; fp; fp = fp->next)
5051 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5052 if (TREE_PURPOSE (link) == function)
5054 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5055 function);
5056 return round_trampoline_addr (tramp);
5059 /* None exists; we must make one. */
5061 /* Find the `struct function' for the function containing FUNCTION. */
5062 fp = 0;
5063 fn_context = decl_function_context (function);
5064 if (fn_context != current_function_decl
5065 && fn_context != inline_function_decl)
5066 for (fp = outer_function_chain; fp; fp = fp->next)
5067 if (fp->decl == fn_context)
5068 break;
5070 /* Allocate run-time space for this trampoline
5071 (usually in the defining function's stack frame). */
5072 #ifdef ALLOCATE_TRAMPOLINE
5073 tramp = ALLOCATE_TRAMPOLINE (fp);
5074 #else
5075 /* If rounding needed, allocate extra space
5076 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5077 #ifdef TRAMPOLINE_ALIGNMENT
5078 #define TRAMPOLINE_REAL_SIZE \
5079 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5080 #else
5081 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5082 #endif
5083 if (fp != 0)
5084 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5085 else
5086 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5087 #endif
5089 /* Record the trampoline for reuse and note it for later initialization
5090 by expand_function_end. */
5091 if (fp != 0)
5093 push_obstacks (fp->function_maybepermanent_obstack,
5094 fp->function_maybepermanent_obstack);
5095 rtlexp = make_node (RTL_EXPR);
5096 RTL_EXPR_RTL (rtlexp) = tramp;
5097 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5098 pop_obstacks ();
5100 else
5102 /* Make the RTL_EXPR node temporary, not momentary, so that the
5103 trampoline_list doesn't become garbage. */
5104 int momentary = suspend_momentary ();
5105 rtlexp = make_node (RTL_EXPR);
5106 resume_momentary (momentary);
5108 RTL_EXPR_RTL (rtlexp) = tramp;
5109 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5112 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5113 return round_trampoline_addr (tramp);
5116 /* Given a trampoline address,
5117 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
5119 static rtx
5120 round_trampoline_addr (tramp)
5121 rtx tramp;
5123 #ifdef TRAMPOLINE_ALIGNMENT
5124 /* Round address up to desired boundary. */
5125 rtx temp = gen_reg_rtx (Pmode);
5126 temp = expand_binop (Pmode, add_optab, tramp,
5127 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5128 temp, 0, OPTAB_LIB_WIDEN);
5129 tramp = expand_binop (Pmode, and_optab, temp,
5130 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5131 temp, 0, OPTAB_LIB_WIDEN);
5132 #endif
5133 return tramp;
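/* E.g., assuming TRAMPOLINE_ALIGNMENT == 64 (8 bytes), an address of
   0x1003 is rounded to (0x1003 + 7) & -8 == 0x1008.  */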
5136 /* The functions identify_blocks and reorder_blocks provide a way to
5137 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5138 duplicate portions of the RTL code. Call identify_blocks before
5139 changing the RTL, and call reorder_blocks after. */
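/* A sketch of the intended calling sequence (the real callers are the
   optimizers themselves):

       tree *map = identify_blocks (block, get_insns ());
       ... transform or duplicate the RTL ...
       block = reorder_blocks (map, block, get_insns ());
       free (map);

   identify_blocks stamps each block note with its block's vector index,
   and reorder_blocks then rebuilds the BLOCK tree from the possibly
   reordered notes.  */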
5141 /* Put all this function's BLOCK nodes including those that are chained
5142 onto the first block into a vector, and return it.
5143 Also store in each NOTE for the beginning or end of a block
5144 the index of that block in the vector.
5145 The arguments are BLOCK, the chain of top-level blocks of the function,
5146 and INSNS, the insn chain of the function. */
5148 tree *
5149 identify_blocks (block, insns)
5150 tree block;
5151 rtx insns;
5153 int n_blocks;
5154 tree *block_vector;
5155 int *block_stack;
5156 int depth = 0;
5157 int next_block_number = 1;
5158 int current_block_number = 1;
5159 rtx insn;
5161 if (block == 0)
5162 return 0;
5164 n_blocks = all_blocks (block, 0);
5165 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5166 block_stack = (int *) alloca (n_blocks * sizeof (int));
5168 all_blocks (block, block_vector);
5170 for (insn = insns; insn; insn = NEXT_INSN (insn))
5171 if (GET_CODE (insn) == NOTE)
5173 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5175 block_stack[depth++] = current_block_number;
5176 current_block_number = next_block_number;
5177 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5179 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5181 current_block_number = block_stack[--depth];
5182 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5186 if (n_blocks != next_block_number)
5187 abort ();
5189 return block_vector;
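/* E.g., for a function whose outermost block contains a subblock A that
   itself contains B, the vector is { outermost, A, B }: the BEG notes
   for A and B are stamped 1 and 2 in order of appearance, and each END
   note gets the number that is current again once its block closes.  */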
5192 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5193 and a revised instruction chain, rebuild the tree structure
5194 of BLOCK nodes to correspond to the new order of RTL.
5195 The new block tree is inserted below TOP_BLOCK.
5196 Returns the current top-level block. */
5198 tree
5199 reorder_blocks (block_vector, block, insns)
5200 tree *block_vector;
5201 tree block;
5202 rtx insns;
5204 tree current_block = block;
5205 rtx insn;
5207 if (block_vector == 0)
5208 return block;
5210 /* Prune the old trees away, so that they don't get in the way. */
5211 BLOCK_SUBBLOCKS (current_block) = 0;
5212 BLOCK_CHAIN (current_block) = 0;
5214 for (insn = insns; insn; insn = NEXT_INSN (insn))
5215 if (GET_CODE (insn) == NOTE)
5217 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5219 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5220 /* If we have seen this block before, copy it. */
5221 if (TREE_ASM_WRITTEN (block))
5222 block = copy_node (block);
5223 BLOCK_SUBBLOCKS (block) = 0;
5224 TREE_ASM_WRITTEN (block) = 1;
5225 BLOCK_SUPERCONTEXT (block) = current_block;
5226 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5227 BLOCK_SUBBLOCKS (current_block) = block;
5228 current_block = block;
5229 NOTE_SOURCE_FILE (insn) = 0;
5231 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5233 BLOCK_SUBBLOCKS (current_block)
5234 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5235 current_block = BLOCK_SUPERCONTEXT (current_block);
5236 NOTE_SOURCE_FILE (insn) = 0;
5240 BLOCK_SUBBLOCKS (current_block)
5241 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5242 return current_block;
5245 /* Reverse the order of elements in the chain T of blocks,
5246 and return the new head of the chain (old last element). */
5248 static tree
5249 blocks_nreverse (t)
5250 tree t;
5252 register tree prev = 0, decl, next;
5253 for (decl = t; decl; decl = next)
5255 next = BLOCK_CHAIN (decl);
5256 BLOCK_CHAIN (decl) = prev;
5257 prev = decl;
5259 return prev;
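/* E.g., the chain A -> B -> C comes back as C -> B -> A; only the
   BLOCK_CHAIN pointers are rewritten, in place.  */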
5262 /* Count the subblocks of the list starting with BLOCK, and list them
5263 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5264 blocks. */
5266 static int
5267 all_blocks (block, vector)
5268 tree block;
5269 tree *vector;
5271 int n_blocks = 0;
5273 while (block)
5275 TREE_ASM_WRITTEN (block) = 0;
5277 /* Record this block. */
5278 if (vector)
5279 vector[n_blocks] = block;
5281 ++n_blocks;
5283 /* Record the subblocks, and their subblocks... */
5284 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5285 vector ? vector + n_blocks : 0);
5286 block = BLOCK_CHAIN (block);
5289 return n_blocks;
5292 /* Build bytecode call descriptor for function SUBR. */
5295 bc_build_calldesc (subr)
5296 tree subr;
5298 tree calldesc = 0, arg;
5299 int nargs = 0;
5301 /* Build the argument description vector in reverse order. */
5302 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
5303 nargs = 0;
5305 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
5307 ++nargs;
5309 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
5310 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
5313 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
5315 /* Prepend the function's return type. */
5316 calldesc = tree_cons ((tree) 0,
5317 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
5318 calldesc);
5320 calldesc = tree_cons ((tree) 0,
5321 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
5322 calldesc);
5324 /* Prepend the arg count. */
5325 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
5327 /* Output the call description vector and get its address. */
5328 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
5329 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
5330 build_index_type (build_int_2 (nargs * 2, 0)));
5332 return output_constant_def (calldesc);
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  if (output_bytecode)
    {
      this_function_decl = subr;
      this_function_calldesc = bc_build_calldesc (subr);
      local_vars_size = 0;
      stack_depth = 0;
      max_stack_depth = 0;
      stmt_expr_depth = 0;
      return;
    }

  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, 2);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;
  current_function_contains_functions = 0;
  current_function_addresses_labels = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */
  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;
}
/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}
/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
  if (!output_bytecode)
    {
      /* The zero below avoids a possible parse error.  */

#if !defined (HAS_INIT_SECTION)
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
			 VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
    }
}
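
/* The library routine `__main' conventionally performs one-time startup
   work such as running static constructors; on targets that provide an
   init section (HAS_INIT_SECTION, defined above) that work happens at
   program initialization instead, so no explicit call is emitted.  */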
extern struct obstack permanent_obstack;

/* Expand start of bytecode function.  See comment at
   expand_function_start below for details.  */

void
bc_expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  char label[20], *name;
  static int nlab;
  tree thisarg;
  int argsz;

  if (TREE_PUBLIC (subr))
    bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));

#ifdef DEBUG_PRINT_CODE
  fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
#endif

  for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
    {
      if (DECL_RTL (thisarg))
	abort ();		/* Should be NULL here I think.  */
      else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
	{
	  DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
	  argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
	}
      else
	{
	  /* Variable-sized objects are pointers to their storage.  */
	  DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
	  argsz += POINTER_SIZE;
	}
    }

  bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));

  ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);

  ++nlab;
  name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
  this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
  this_function_bytecode
    = bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
}
/* Expand end of bytecode function.  See details in the comment of
   expand_function_end (), below.  */

void
bc_expand_function_end ()
{
  char *ptrconsts;

  expand_null_return ();

  /* Emit any fixup code.  This must be done before the call to
     BC_END_FUNCTION (), since that will cause the bytecode
     segment to be finished off and closed.  */

  expand_fixups (NULL_RTX);

  ptrconsts = bc_end_function ();

  bc_align_const (2 /* INT_ALIGN */);

  /* If this changes also make sure to change bc-interp.h!  */

  bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
  bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
  bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
  bc_emit_const_labelref (this_function_bytecode, 0);
  bc_emit_const_labelref (ptrconsts, 0);
  bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
}
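
/* The callinfo record emitted above therefore consists of, in order:
   the function's maximum stack depth, the size of its local variables,
   a reference to its bytecode, a reference to its pointer constants,
   and a reference to its call descriptor (see bc_build_calldesc).  */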
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr;

  if (output_bytecode)
    {
      bc_expand_function_start (subr, parms_have_cleanups);
      return;
    }

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
	  || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
	    = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups)
    {
      /* If function will end with cleanup code for parms,
	 compute the return values into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }
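
  /* To summarize the cases above: the result rtx is a MEM at the
     address where an aggregate is to be stored, nothing at all for a
     void return, a fresh pseudo register when parm cleanups must run
     before the value reaches the hard return register, or the hard
     return register itself.  */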
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation, copy the static chain
	 pointer into a pseudo.  If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG.  If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
					       memory_address (Pmode,
							       last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }
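
  /* For illustration, consider GNU C nested functions such as

	int outer (int x)
	{
	  int middle (void)
	    {
	      int inner (void) { return x; }
	      return inner ();
	    }
	  return middle ();
	}

     While expanding `inner', the loop above creates one context_display
     entry for `middle' and one for `outer', loading each enclosing frame
     pointer in turn from the fixed slot where the static chain is always
     stored.  */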
  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  if (output_bytecode)
    {
      bc_expand_function_end ();
      return;
    }

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx blktramp;
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
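
  /* For illustration: a trampoline is required when the address of a
     nested function escapes its parent, as in the GNU C fragment

	void apply (void (*fn) (void));

	void outer (int x)
	{
	  void bump (void) { x++; }
	  apply (bump);
	}

     Here `bump' is passed as an ordinary function pointer, so the loop
     above copies the TRAMPOLINE_SIZE-byte template (when the target
     defines one) into the stack slot reserved for `bump' and lets
     INITIALIZE_TRAMPOLINE fill in the entry address and static chain,
     all at entry to `outer'.  */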
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
	emit_label (label);
      }
  }

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a temporary
	 pseudo.  */
      current_function_return_rtx = real_decl_result;
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
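
/* The UID vector built above is zero-terminated; both `contains' and
   the scan loops in reposition_prologue_and_epilogue_notes rely on
   that sentinel to find its end.  */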
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
      return 0;
    }
}
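
/* An INSN whose pattern is a SEQUENCE is a delay-slot group created by
   reorg, which is why `contains' counts every member of the group that
   appears in VEC instead of checking only the head insn.  */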
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq, insn;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
	 prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
	 if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
	  && GET_CODE (basic_block_head[0]) != CODE_LABEL)
	basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
	{
	  rtx tail, seq, tem;
	  rtx first_use = 0;
	  rtx last_use = 0;

	  /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	     epilogue insns, the USE insns at the end of a function,
	     the jump insn that returns, and then a BARRIER.  */

	  /* Move the USE insns at the end of a function onto a list.  */
	  while (prev
		 && GET_CODE (prev) == INSN
		 && GET_CODE (PATTERN (prev)) == USE)
	    {
	      tem = prev;
	      prev = prev_nonnote_insn (prev);

	      NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
	      PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
	      if (first_use)
		{
		  NEXT_INSN (tem) = first_use;
		  PREV_INSN (first_use) = tem;
		}
	      first_use = tem;
	      if (!last_use)
		last_use = tem;
	    }

	  emit_barrier_after (insn);

	  seq = gen_epilogue ();
	  tail = emit_jump_insn_after (seq, insn);

	  /* Insert the USE insns immediately before the return insn, which
	     must be the first instruction before the final barrier.  */
	  if (first_use)
	    {
	      tem = prev_nonnote_insn (get_last_insn ());
	      NEXT_INSN (PREV_INSN (tem)) = first_use;
	      PREV_INSN (first_use) = PREV_INSN (tem);
	      PREV_INSN (tem) = last_use;
	      NEXT_INSN (last_use) = tem;
	    }

	  emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

	  /* Include the new epilogue insns in the last block.  Ignore
	     them if they form a basic block unto themselves.  */
	  if (basic_block_end && n_basic_blocks
	      && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
	    basic_block_end[n_basic_blocks - 1] = tail;

	  /* Retain a map of the epilogue insns.  */
	  epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  return;
	}
    }
#endif
  epilogue = 0;
}
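
/* The prologue and epilogue UID maps saved above outlive this pass;
   reposition_prologue_and_epilogue_notes uses them after scheduling
   and reorg may have separated the insns from their notes.  */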
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }
		  next = NEXT_INSN (note);
		  prev = PREV_INSN (note);
		  if (prev)
		    NEXT_INSN (prev) = next;
		  if (next)
		    PREV_INSN (next) = prev;
		  add_insn_after (note, insn);
		}
	    }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }
		  next = NEXT_INSN (note);
		  prev = PREV_INSN (note);
		  if (prev)
		    NEXT_INSN (prev) = next;
		  if (next)
		    PREV_INSN (next) = prev;
		  add_insn_after (note, PREV_INSN (insn));
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}