/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
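
/* Worked example (an illustration, not part of the original source):
   with ALIGN == 8,

	CEIL_ROUND (13, 8)  == (13 + 7) & ~7 == 16
	FLOOR_ROUND (-5, 8) == -5 & ~7       == -8

   so both results are exact multiples of the alignment.  The bit-mask
   form rounds toward minus infinity even for negative frame offsets,
   where `-5 / 8' might round toward zero.  */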
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;
/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if not optimizing.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if not optimizing.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
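
/* The following is an illustrative sketch, not part of the original
   source: the typical life cycle of a statement temporary under the
   nesting-level scheme described above.  The mode and KEEP values are
   arbitrary examples; the helpers used are the real ones defined later
   in this file.  */
#if 0
static void
temp_slot_level_sketch ()
{
  rtx t;

  push_temp_slots ();		/* enter a new nesting level */
  t = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
  /* ... emit insns computing a value into T ... */
  preserve_temp_slots (t);	/* result escapes: demote T one level */
  pop_temp_slots ();		/* frees the remaining slots at this level */
}
#endif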
/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;
/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address  PROTO((rtx));
static void put_reg_into_stack	PROTO((struct function *, rtx, tree,
				       enum machine_mode, enum machine_mode,
				       int, int, int));
static void fixup_var_refs	PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement	PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int));
static void fixup_var_refs_1	PROTO((rtx, enum machine_mode, rtx *, rtx,
				       struct fixup_replacement **));
static rtx fixup_memory_subreg	PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx fixup_stack_1	PROTO((rtx, rtx));
static void optimize_bit_field	PROTO((rtx, rtx, rtx *));
static void instantiate_decls	PROTO((tree, int));
static void instantiate_decls_1	PROTO((tree, int));
static void instantiate_decl	PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers	PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below		PROTO((struct args_size *, enum machine_mode,
				       tree));
static tree round_down		PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse	PROTO((tree));
static int all_blocks		PROTO((tree, tree *));
static int *record_insns	PROTO((rtx));
static int contains		PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1	PROTO((rtx *, rtx, int));

/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;
/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);

  init_emit ();
}
void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
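
/* The following is an illustrative sketch, not part of the original
   source: how language-specific code brackets compilation of a nested
   function.  CONTEXT is the enclosing FUNCTION_DECL; the expansion of
   the nested body in between is elided.  */
#if 0
static void
nested_function_sketch (context)
     tree context;
{
  push_function_context_to (context);	/* save the outer function's state */
  /* ... generate RTL for the nested function here ... */
  pop_function_context_from (context);	/* restore the outer state */
}
#endif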
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
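
/* The following is an illustrative sketch, not part of the original
   source: the three meanings of the ALIGN argument documented above.
   The modes and sizes are arbitrary examples.  */
#if 0
static void
stack_local_align_sketch ()
{
  rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
					/* aligned according to SImode */
  rtx b = assign_stack_local (BLKmode, 32, -1);
					/* BIGGEST_ALIGNMENT, size rounded */
  rtx c = assign_stack_local (BLKmode, 32, 64);
					/* explicit 64-bit boundary */
}
#endif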
/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
		 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
			function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */
rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
	  && (best_p == 0 || best_p->size > p->size))
	best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx (MEM, BLKmode,
				 plus_constant (XEXP (best_p->slot, 0),
						rounded_size));
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
					 stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
	 use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
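
/* The following is an illustrative sketch, not part of the original
   source: the four KEEP conventions documented above assign_stack_temp.
   The mode and size are arbitrary examples.  */
#if 0
static void
stack_temp_keep_sketch ()
{
  rtx t0 = assign_stack_temp (SImode, 4, 0); /* freed by free_temp_slots */
  rtx t1 = assign_stack_temp (SImode, 4, 1); /* kept, e.g. a block's auto var */
  rtx t2 = assign_stack_temp (SImode, 4, 2); /* lives at target_temp_slot_level */
  rtx t3 = assign_stack_temp (SImode, 4, 3); /* lives at var_temp_slot_level */
}
#endif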
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */
rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
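
/* Worked example (an illustration, not part of the original source):
   if free BLKmode slot P has base_offset 16 and full_size 8, and free
   BLKmode slot Q has base_offset 24, then
   P->base_offset + P->full_size == Q->base_offset, so the loop above
   merges Q into P, leaving one free slot covering offsets 16 through
   16 + 8 + Q->full_size - 1.  */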
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  return 0;
}
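
/* Worked example (an illustration, not part of the original source):
   an address of the form (plus virtual_stack_vars_rtx (const_int 20))
   matches a slot with base_offset 16 and full_size 8 in the test
   above, since 16 <= 20 < 16 + 8; any constant offset that lands
   inside a slot identifies that slot.  */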
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since such a temporary can
   be reused while generating the same RTL_EXPR, but this is complex
   and probably not worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl)
			    || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (flag_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), ptr_mode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
	new = function->parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (regno < max_parm_reg)
	new = parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
	 on saveable obstack.  */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
	  end_sequence ();
	}
    }
}
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && XEXP (PATTERN (insn), 0) == var)
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.  */
	  else if (toplevel
		   && GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     (if it is not) can be placed between the CALL_INSN and
		     INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0)
		= walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	}
      insn = next;
    }
}
1786 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1787 See if the rtx expression at *LOC in INSN needs to be changed.
1789 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1790 contain a list of original rtx's and replacements. If we find that we need
1791 to modify this insn by replacing a memory reference with a pseudo or by
1792 making a new MEM to implement a SUBREG, we consult that list to see if
1793 we have already chosen a replacement. If none has already been allocated,
1794 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1795 or the SUBREG, as appropriate, to the pseudo. */
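/* A minimal sketch (illustrative only, not compiled) of the
   replacement-list protocol assumed below: find_fixup_replacement
   returns the entry for X, creating one with `new' still zero on
   first sight, so identical rtx's within one insn all receive the
   same replacement.  */
#if 0
  replacement = find_fixup_replacement (replacements, x);
  if (replacement->new == 0)			/* first occurrence: allocate */
    replacement->new = gen_reg_rtx (GET_MODE (x));
  *loc = replacement->new;			/* later occurrences: reuse */
#endif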
1797 static void
1798 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1799 register rtx var;
1800 enum machine_mode promoted_mode;
1801 register rtx *loc;
1802 rtx insn;
1803 struct fixup_replacement **replacements;
1805 register int i;
1806 register rtx x = *loc;
1807 RTX_CODE code = GET_CODE (x);
1808 register char *fmt;
1809 register rtx tem, tem1;
1810 struct fixup_replacement *replacement;
1812 switch (code)
1814 case ADDRESSOF:
1815 if (XEXP (x, 0) == var)
1817 /* Prevent sharing of rtl that might lose. */
1818 rtx sub = copy_rtx (XEXP (var, 0));
1820 start_sequence ();
1822 if (! validate_change (insn, loc, sub, 0))
1824 rtx y = force_operand (sub, NULL_RTX);
1826 if (! validate_change (insn, loc, y, 0))
1827 *loc = copy_to_reg (y);
1830 emit_insn_before (gen_sequence (), insn);
1831 end_sequence ();
1833 return;
1835 case MEM:
1836 if (var == x)
1838 /* If we already have a replacement, use it. Otherwise,
1839 try to fix up this address in case it is invalid. */
1841 replacement = find_fixup_replacement (replacements, var);
1842 if (replacement->new)
1844 *loc = replacement->new;
1845 return;
1848 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1850 /* Unless we are forcing memory to register or we changed the mode,
1851 we can leave things the way they are if the insn is valid. */
1853 INSN_CODE (insn) = -1;
1854 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1855 && recog_memoized (insn) >= 0)
1856 return;
1858 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1859 return;
1862 /* If X contains VAR, we need to unshare it here so that we update
1863 each occurrence separately. But all identical MEMs in one insn
1864 must be replaced with the same rtx because of the possibility of
1865 MATCH_DUPs. */
1867 if (reg_mentioned_p (var, x))
1869 replacement = find_fixup_replacement (replacements, x);
1870 if (replacement->new == 0)
1871 replacement->new = copy_most_rtx (x, var);
1873 *loc = x = replacement->new;
1875 break;
1877 case REG:
1878 case CC0:
1879 case PC:
1880 case CONST_INT:
1881 case CONST:
1882 case SYMBOL_REF:
1883 case LABEL_REF:
1884 case CONST_DOUBLE:
1885 return;
1887 case SIGN_EXTRACT:
1888 case ZERO_EXTRACT:
1889 /* Note that in some cases those types of expressions are altered
1890 by optimize_bit_field, and do not survive to get here. */
1891 if (XEXP (x, 0) == var
1892 || (GET_CODE (XEXP (x, 0)) == SUBREG
1893 && SUBREG_REG (XEXP (x, 0)) == var))
1895 /* Get TEM as a valid MEM in the mode presently in the insn.
1897 We don't worry about the possibility of MATCH_DUP here; it
1898 is highly unlikely and would be tricky to handle. */
1900 tem = XEXP (x, 0);
1901 if (GET_CODE (tem) == SUBREG)
1903 if (GET_MODE_BITSIZE (GET_MODE (tem))
1904 > GET_MODE_BITSIZE (GET_MODE (var)))
1906 replacement = find_fixup_replacement (replacements, var);
1907 if (replacement->new == 0)
1908 replacement->new = gen_reg_rtx (GET_MODE (var));
1909 SUBREG_REG (tem) = replacement->new;
1911 else
1912 tem = fixup_memory_subreg (tem, insn, 0);
1914 else
1915 tem = fixup_stack_1 (tem, insn);
1917 /* Unless we want to load from memory, get TEM into the proper mode
1918 for an extract from memory. This can only be done if the
1919 extract is at a constant position and length. */
1921 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1922 && GET_CODE (XEXP (x, 2)) == CONST_INT
1923 && ! mode_dependent_address_p (XEXP (tem, 0))
1924 && ! MEM_VOLATILE_P (tem))
1926 enum machine_mode wanted_mode = VOIDmode;
1927 enum machine_mode is_mode = GET_MODE (tem);
1928 HOST_WIDE_INT width = INTVAL (XEXP (x, 1));
1929 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1931 #ifdef HAVE_extzv
1932 if (GET_CODE (x) == ZERO_EXTRACT)
1933 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1934 #endif
1935 #ifdef HAVE_extv
1936 if (GET_CODE (x) == SIGN_EXTRACT)
1937 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1938 #endif
1939 /* If we have a narrower mode, we can do something. */
1940 if (wanted_mode != VOIDmode
1941 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1943 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1944 rtx old_pos = XEXP (x, 2);
1945 rtx newmem;
1947 /* If the bytes and bits are counted differently, we
1948 must adjust the offset. */
1949 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1950 offset = (GET_MODE_SIZE (is_mode)
1951 - GET_MODE_SIZE (wanted_mode) - offset);
1953 pos %= GET_MODE_BITSIZE (wanted_mode);
1955 newmem = gen_rtx (MEM, wanted_mode,
1956 plus_constant (XEXP (tem, 0), offset));
1957 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1958 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1959 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1961 /* Make the change and see if the insn remains valid. */
1962 INSN_CODE (insn) = -1;
1963 XEXP (x, 0) = newmem;
1964 XEXP (x, 2) = GEN_INT (pos);
1966 if (recog_memoized (insn) >= 0)
1967 return;
1969 /* Otherwise, restore old position. XEXP (x, 0) will be
1970 restored later. */
1971 XEXP (x, 2) = old_pos;
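/* Worked example (illustrative): extracting 8 bits at bit position 24
   from an SImode MEM when the extraction pattern wants QImode gives
   offset = 24 / BITS_PER_UNIT = 3.  If bytes and bits are numbered
   from opposite ends, the offset is flipped to 4 - 1 - 3 = 0; either
   way the new bit position becomes 24 % 8 = 0 within the QImode byte.  */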
1975 /* If we get here, the bitfield extract insn can't accept a memory
1976 reference. Copy the input into a register. */
1978 tem1 = gen_reg_rtx (GET_MODE (tem));
1979 emit_insn_before (gen_move_insn (tem1, tem), insn);
1980 XEXP (x, 0) = tem1;
1981 return;
1983 break;
1985 case SUBREG:
1986 if (SUBREG_REG (x) == var)
1988 /* If this is a special SUBREG made because VAR was promoted
1989 from a wider mode, replace it with VAR and call ourself
1990 recursively, this time saying that the object previously
1991 had its current mode (by virtue of the SUBREG). */
1993 if (SUBREG_PROMOTED_VAR_P (x))
1995 *loc = var;
1996 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1997 return;
2000 /* If this SUBREG makes VAR wider, it has become a paradoxical
2001 SUBREG with VAR in memory, but these aren't allowed at this
2002 stage of the compilation. So load VAR into a pseudo and take
2003 a SUBREG of that pseudo. */
2004 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2006 replacement = find_fixup_replacement (replacements, var);
2007 if (replacement->new == 0)
2008 replacement->new = gen_reg_rtx (GET_MODE (var));
2009 SUBREG_REG (x) = replacement->new;
2010 return;
2013 /* See if we have already found a replacement for this SUBREG.
2014 If so, use it. Otherwise, make a MEM and see if the insn
2015 is recognized. If not, or if we should force MEM into a register,
2016 make a pseudo for this SUBREG. */
2017 replacement = find_fixup_replacement (replacements, x);
2018 if (replacement->new)
2020 *loc = replacement->new;
2021 return;
2024 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2026 INSN_CODE (insn) = -1;
2027 if (! flag_force_mem && recog_memoized (insn) >= 0)
2028 return;
2030 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2031 return;
2033 break;
2035 case SET:
2036 /* First do special simplification of bit-field references. */
2037 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2038 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2039 optimize_bit_field (x, insn, 0);
2040 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2041 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2042 optimize_bit_field (x, insn, NULL_PTR);
2044 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2045 into a register and then store it back out. */
2046 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2047 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2048 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2049 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2050 > GET_MODE_SIZE (GET_MODE (var))))
2052 replacement = find_fixup_replacement (replacements, var);
2053 if (replacement->new == 0)
2054 replacement->new = gen_reg_rtx (GET_MODE (var));
2056 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2057 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2060 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2061 insn into a pseudo and store the low part of the pseudo into VAR. */
2062 if (GET_CODE (SET_DEST (x)) == SUBREG
2063 && SUBREG_REG (SET_DEST (x)) == var
2064 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2065 > GET_MODE_SIZE (GET_MODE (var))))
2067 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2068 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2069 tem)),
2070 insn);
2071 break;
2075 rtx dest = SET_DEST (x);
2076 rtx src = SET_SRC (x);
2077 rtx outerdest = dest;
2079 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2080 || GET_CODE (dest) == SIGN_EXTRACT
2081 || GET_CODE (dest) == ZERO_EXTRACT)
2082 dest = XEXP (dest, 0);
2084 if (GET_CODE (src) == SUBREG)
2085 src = XEXP (src, 0);
2087 /* If VAR does not appear at the top level of the SET
2088 just scan the lower levels of the tree. */
2090 if (src != var && dest != var)
2091 break;
2093 /* We will need to rerecognize this insn. */
2094 INSN_CODE (insn) = -1;
2096 #ifdef HAVE_insv
2097 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2099 /* Since this case will return, ensure we fixup all the
2100 operands here. */
2101 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2102 insn, replacements);
2103 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2104 insn, replacements);
2105 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2106 insn, replacements);
2108 tem = XEXP (outerdest, 0);
2110 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2111 that may appear inside a ZERO_EXTRACT.
2112 This was legitimate when the MEM was a REG. */
2113 if (GET_CODE (tem) == SUBREG
2114 && SUBREG_REG (tem) == var)
2115 tem = fixup_memory_subreg (tem, insn, 0);
2116 else
2117 tem = fixup_stack_1 (tem, insn);
2119 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2120 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2121 && ! mode_dependent_address_p (XEXP (tem, 0))
2122 && ! MEM_VOLATILE_P (tem))
2124 enum machine_mode wanted_mode
2125 = insn_operand_mode[(int) CODE_FOR_insv][0];
2126 enum machine_mode is_mode = GET_MODE (tem);
2127 HOST_WIDE_INT width = INTVAL (XEXP (outerdest, 1));
2128 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2130 /* If we have a narrower mode, we can do something. */
2131 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2133 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2134 rtx old_pos = XEXP (outerdest, 2);
2135 rtx newmem;
2137 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2138 offset = (GET_MODE_SIZE (is_mode)
2139 - GET_MODE_SIZE (wanted_mode) - offset);
2141 pos %= GET_MODE_BITSIZE (wanted_mode);
2143 newmem = gen_rtx (MEM, wanted_mode,
2144 plus_constant (XEXP (tem, 0), offset));
2145 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2146 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2147 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2149 /* Make the change and see if the insn remains valid. */
2150 INSN_CODE (insn) = -1;
2151 XEXP (outerdest, 0) = newmem;
2152 XEXP (outerdest, 2) = GEN_INT (pos);
2154 if (recog_memoized (insn) >= 0)
2155 return;
2157 /* Otherwise, restore old position. XEXP (outerdest, 0) will be
2158 replaced below. */
2159 XEXP (outerdest, 2) = old_pos;
2163 /* If we get here, the bit-field store doesn't allow memory
2164 or isn't located at a constant position. Load the value into
2165 a register, do the store, and put it back into memory. */
2167 tem1 = gen_reg_rtx (GET_MODE (tem));
2168 emit_insn_before (gen_move_insn (tem1, tem), insn);
2169 emit_insn_after (gen_move_insn (tem, tem1), insn);
2170 XEXP (outerdest, 0) = tem1;
2171 return;
2173 #endif
2175 /* STRICT_LOW_PART is a no-op on memory references
2176 and it can cause combinations to be unrecognizable,
2177 so eliminate it. */
2179 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2180 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2182 /* A valid insn to copy VAR into or out of a register
2183 must be left alone, to avoid an infinite loop here.
2184 If the reference to VAR is by a subreg, fix that up,
2185 since SUBREG is not valid for a memref.
2186 Also fix up the address of the stack slot.
2188 Note that we must not try to recognize the insn until
2189 after we know that we have valid addresses and no
2190 (subreg (mem ...) ...) constructs, since these interfere
2191 with determining the validity of the insn. */
2193 if ((SET_SRC (x) == var
2194 || (GET_CODE (SET_SRC (x)) == SUBREG
2195 && SUBREG_REG (SET_SRC (x)) == var))
2196 && (GET_CODE (SET_DEST (x)) == REG
2197 || (GET_CODE (SET_DEST (x)) == SUBREG
2198 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2199 && GET_MODE (var) == promoted_mode
2200 && x == single_set (insn))
2202 rtx pat;
2204 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2205 if (replacement->new)
2206 SET_SRC (x) = replacement->new;
2207 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2208 SET_SRC (x) = replacement->new
2209 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2210 else
2211 SET_SRC (x) = replacement->new
2212 = fixup_stack_1 (SET_SRC (x), insn);
2214 if (recog_memoized (insn) >= 0)
2215 return;
2217 /* INSN is not valid, but we know that we want to
2218 copy SET_SRC (x) to SET_DEST (x) in some way. So
2219 we generate the move and see whether it requires more
2220 than one insn. If it does, we emit those insns and
2221 delete INSN. Otherwise, we can just replace the pattern
2222 of INSN; we have already verified above that INSN has
2223 no function other than to do X. */
2225 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2226 if (GET_CODE (pat) == SEQUENCE)
2228 emit_insn_after (pat, insn);
2229 PUT_CODE (insn, NOTE);
2230 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2231 NOTE_SOURCE_FILE (insn) = 0;
2233 else
2234 PATTERN (insn) = pat;
2236 return;
2239 if ((SET_DEST (x) == var
2240 || (GET_CODE (SET_DEST (x)) == SUBREG
2241 && SUBREG_REG (SET_DEST (x)) == var))
2242 && (GET_CODE (SET_SRC (x)) == REG
2243 || (GET_CODE (SET_SRC (x)) == SUBREG
2244 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2245 && GET_MODE (var) == promoted_mode
2246 && x == single_set (insn))
2248 rtx pat;
2250 if (GET_CODE (SET_DEST (x)) == SUBREG)
2251 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2252 else
2253 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2255 if (recog_memoized (insn) >= 0)
2256 return;
2258 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2259 if (GET_CODE (pat) == SEQUENCE)
2261 emit_insn_after (pat, insn);
2262 PUT_CODE (insn, NOTE);
2263 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2264 NOTE_SOURCE_FILE (insn) = 0;
2266 else
2267 PATTERN (insn) = pat;
2269 return;
2272 /* Otherwise, storing into VAR must be handled specially
2273 by storing into a temporary and copying that into VAR
2274 with a new insn after this one. Note that this case
2275 will be used when storing into a promoted scalar since
2276 the insn will now have different modes on the input
2277 and output and hence will be invalid (except for the case
2278 of setting it to a constant, which does not need any
2279 change if it is valid). We generate extra code in that case,
2280 but combine.c will eliminate it. */
2282 if (dest == var)
2284 rtx temp;
2285 rtx fixeddest = SET_DEST (x);
2287 /* A STRICT_LOW_PART around a MEM can be discarded. */
2288 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2289 fixeddest = XEXP (fixeddest, 0);
2290 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2291 if (GET_CODE (fixeddest) == SUBREG)
2293 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2294 promoted_mode = GET_MODE (fixeddest);
2296 else
2297 fixeddest = fixup_stack_1 (fixeddest, insn);
2299 temp = gen_reg_rtx (promoted_mode);
2301 emit_insn_after (gen_move_insn (fixeddest,
2302 gen_lowpart (GET_MODE (fixeddest),
2303 temp)),
2304 insn);
2306 SET_DEST (x) = temp;
2310 default:
2311 break;
2314 /* Nothing special about this RTX; fix its operands. */
2316 fmt = GET_RTX_FORMAT (code);
2317 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2319 if (fmt[i] == 'e')
2320 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2321 if (fmt[i] == 'E')
2323 register int j;
2324 for (j = 0; j < XVECLEN (x, i); j++)
2325 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2326 insn, replacements);
2331 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2332 return an rtx (MEM:m1 newaddr) which is equivalent.
2333 If any insns must be emitted to compute NEWADDR, put them before INSN.
2335 UNCRITICAL nonzero means accept paradoxical subregs.
2336 This is used for subregs found inside REG_NOTES. */
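/* A worked example (illustrative, assuming 32-bit words): the rtx
   (SUBREG:SI (MEM:DI addr) 1) has SUBREG_WORD 1, so it is rewritten
   as (MEM:SI (plus addr (const_int 4))); the BYTES_BIG_ENDIAN
   adjustment below is zero in this case because both modes occupy
   whole words.  */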
2338 static rtx
2339 fixup_memory_subreg (x, insn, uncritical)
2340 rtx x;
2341 rtx insn;
2342 int uncritical;
2344 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2345 rtx addr = XEXP (SUBREG_REG (x), 0);
2346 enum machine_mode mode = GET_MODE (x);
2347 rtx saved, result;
2349 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2350 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2351 && ! uncritical)
2352 abort ();
2354 if (BYTES_BIG_ENDIAN)
2355 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2356 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2357 addr = plus_constant (addr, offset);
2358 if (!flag_force_addr && memory_address_p (mode, addr))
2359 /* Shortcut if no insns need be emitted. */
2360 return change_address (SUBREG_REG (x), mode, addr);
2361 start_sequence ();
2362 result = change_address (SUBREG_REG (x), mode, addr);
2363 emit_insn_before (gen_sequence (), insn);
2364 end_sequence ();
2365 return result;
2368 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2369 Replace subexpressions of X in place.
2370 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2371 Otherwise return X, with its contents possibly altered.
2373 If any insns must be emitted to compute NEWADDR, put them before INSN.
2375 UNCRITICAL is as in fixup_memory_subreg. */
2377 static rtx
2378 walk_fixup_memory_subreg (x, insn, uncritical)
2379 register rtx x;
2380 rtx insn;
2381 int uncritical;
2383 register enum rtx_code code;
2384 register char *fmt;
2385 register int i;
2387 if (x == 0)
2388 return 0;
2390 code = GET_CODE (x);
2392 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2393 return fixup_memory_subreg (x, insn, uncritical);
2395 /* Nothing special about this RTX; fix its operands. */
2397 fmt = GET_RTX_FORMAT (code);
2398 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2400 if (fmt[i] == 'e')
2401 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2402 if (fmt[i] == 'E')
2404 register int j;
2405 for (j = 0; j < XVECLEN (x, i); j++)
2406 XVECEXP (x, i, j)
2407 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2410 return x;
2413 /* For each memory ref within X, if it refers to a stack slot
2414 with an out of range displacement, put the address in a temp register
2415 (emitting new insns before INSN to load these registers)
2416 and alter the memory ref to use that register.
2417 Replace each such MEM rtx with a copy, to avoid clobberage. */
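/* Sketch of the transformation (illustrative; the displacement is a
   made-up value too large for the machine's addressing modes):

	(mem:SI (plus:SI (reg fp) (const_int 40000)))

   becomes, after loading a fresh pseudo R before INSN,

	(set (reg R) (plus:SI (reg fp) (const_int 40000)))
	(mem:SI (reg R))  */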
2419 static rtx
2420 fixup_stack_1 (x, insn)
2421 rtx x;
2422 rtx insn;
2424 register int i;
2425 register RTX_CODE code = GET_CODE (x);
2426 register char *fmt;
2428 if (code == MEM)
2430 register rtx ad = XEXP (x, 0);
2431 /* If we have address of a stack slot but it's not valid
2432 (displacement is too large), compute the sum in a register. */
2433 if (GET_CODE (ad) == PLUS
2434 && GET_CODE (XEXP (ad, 0)) == REG
2435 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2436 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2437 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2438 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2439 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2440 #endif
2441 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2442 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2443 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2444 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2446 rtx temp, seq;
2447 if (memory_address_p (GET_MODE (x), ad))
2448 return x;
2450 start_sequence ();
2451 temp = copy_to_reg (ad);
2452 seq = gen_sequence ();
2453 end_sequence ();
2454 emit_insn_before (seq, insn);
2455 return change_address (x, VOIDmode, temp);
2457 return x;
2460 fmt = GET_RTX_FORMAT (code);
2461 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2463 if (fmt[i] == 'e')
2464 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2465 if (fmt[i] == 'E')
2467 register int j;
2468 for (j = 0; j < XVECLEN (x, i); j++)
2469 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2472 return x;
2475 /* Optimization: a bit-field instruction whose field
2476 happens to be a byte or halfword in memory
2477 can be changed to a move instruction.
2479 We call here when INSN is an insn to examine or store into a bit-field.
2480 BODY is the SET-rtx to be altered.
2482 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2483 (Currently this is called only from function.c, and EQUIV_MEM
2484 is always 0.) */
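/* Example of the optimization (illustrative, assuming little-endian
   bit numbering): the byte-aligned store

	(set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
	     (reg:SI 100))

   can become the plain QImode move

	(set (mem:QI (plus addr (const_int 1))) (subreg:QI (reg:SI 100) 0))

   The code below also handles the big-endian adjustment and the
   extraction (load) direction.  */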
2486 static void
2487 optimize_bit_field (body, insn, equiv_mem)
2488 rtx body;
2489 rtx insn;
2490 rtx *equiv_mem;
2492 register rtx bitfield;
2493 int destflag;
2494 rtx seq = 0;
2495 enum machine_mode mode;
2497 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2498 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2499 bitfield = SET_DEST (body), destflag = 1;
2500 else
2501 bitfield = SET_SRC (body), destflag = 0;
2503 /* First check that the field being stored has constant size and position
2504 and is in fact a byte or halfword suitably aligned. */
2506 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2507 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2508 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2509 != BLKmode)
2510 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2512 register rtx memref = 0;
2514 /* Now check that the containing word is memory, not a register,
2515 and that it is safe to change the machine mode. */
2517 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2518 memref = XEXP (bitfield, 0);
2519 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2520 && equiv_mem != 0)
2521 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2522 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2523 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2524 memref = SUBREG_REG (XEXP (bitfield, 0));
2525 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2526 && equiv_mem != 0
2527 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2528 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2530 if (memref
2531 && ! mode_dependent_address_p (XEXP (memref, 0))
2532 && ! MEM_VOLATILE_P (memref))
2534 /* Now adjust the address, first for any subreg'ing
2535 that we are now getting rid of,
2536 and then for which byte of the word is wanted. */
2538 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2539 rtx insns;
2541 /* Adjust OFFSET to count bits from low-address byte. */
2542 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2543 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2544 - offset - INTVAL (XEXP (bitfield, 1)));
2546 /* Adjust OFFSET to count bytes from low-address byte. */
2547 offset /= BITS_PER_UNIT;
2548 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2550 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2551 if (BYTES_BIG_ENDIAN)
2552 offset -= (MIN (UNITS_PER_WORD,
2553 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2554 - MIN (UNITS_PER_WORD,
2555 GET_MODE_SIZE (GET_MODE (memref))));
2558 start_sequence ();
2559 memref = change_address (memref, mode,
2560 plus_constant (XEXP (memref, 0), offset));
2561 insns = get_insns ();
2562 end_sequence ();
2563 emit_insns_before (insns, insn);
2565 /* Store this memory reference where
2566 we found the bit field reference. */
2568 if (destflag)
2570 validate_change (insn, &SET_DEST (body), memref, 1);
2571 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2573 rtx src = SET_SRC (body);
2574 while (GET_CODE (src) == SUBREG
2575 && SUBREG_WORD (src) == 0)
2576 src = SUBREG_REG (src);
2577 if (GET_MODE (src) != GET_MODE (memref))
2578 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2579 validate_change (insn, &SET_SRC (body), src, 1);
2581 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2582 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2583 /* This shouldn't happen because anything that didn't have
2584 one of these modes should have been converted explicitly
2585 and then referenced through a subreg.
2586 This is so because the original bit-field was
2587 handled by agg_mode and so its tree structure had
2588 the same mode as memref now has. */
2589 abort ();
2591 else
2593 rtx dest = SET_DEST (body);
2595 while (GET_CODE (dest) == SUBREG
2596 && SUBREG_WORD (dest) == 0
2597 && (GET_MODE_CLASS (GET_MODE (dest))
2598 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2599 dest = SUBREG_REG (dest);
2601 validate_change (insn, &SET_DEST (body), dest, 1);
2603 if (GET_MODE (dest) == GET_MODE (memref))
2604 validate_change (insn, &SET_SRC (body), memref, 1);
2605 else
2607 /* Convert the mem ref to the destination mode. */
2608 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2610 start_sequence ();
2611 convert_move (newreg, memref,
2612 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2613 seq = get_insns ();
2614 end_sequence ();
2616 validate_change (insn, &SET_SRC (body), newreg, 1);
2620 /* See if we can convert this extraction or insertion into
2621 a simple move insn. We might not be able to do so if this
2622 was, for example, part of a PARALLEL.
2624 If we succeed, write out any needed conversions. If we fail,
2625 it is hard to guess why we failed, so don't do anything
2626 special; just let the optimization be suppressed. */
2628 if (apply_change_group () && seq)
2629 emit_insns_before (seq, insn);
2634 /* These routines are responsible for converting virtual register references
2635 to the actual hard register references once RTL generation is complete.
2637 The following four variables are used for communication between the
2638 routines. They contain the offsets of the virtual registers from their
2639 respective hard registers. */
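/* That is, once instantiate_virtual_regs has computed them:

	virtual_incoming_args_rtx == arg_pointer_rtx   + in_arg_offset
	virtual_stack_vars_rtx	  == frame_pointer_rtx + var_offset
	virtual_stack_dynamic_rtx == stack_pointer_rtx + dynamic_offset
	virtual_outgoing_args_rtx == stack_pointer_rtx + out_arg_offset  */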
2641 static int in_arg_offset;
2642 static int var_offset;
2643 static int dynamic_offset;
2644 static int out_arg_offset;
2646 /* In most machines, the stack pointer register is equivalent to the bottom
2647 of the stack. */
2649 #ifndef STACK_POINTER_OFFSET
2650 #define STACK_POINTER_OFFSET 0
2651 #endif
2653 /* If not defined, pick an appropriate default for the offset of dynamically
2654 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2655 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2657 #ifndef STACK_DYNAMIC_OFFSET
2659 #ifdef ACCUMULATE_OUTGOING_ARGS
2660 /* The bottom of the stack points to the actual arguments. If
2661 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2662 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2663 stack space for register parameters is not pushed by the caller, but
2664 rather part of the fixed stack areas and hence not included in
2665 `current_function_outgoing_args_size'. Nevertheless, we must allow
2666 for it when allocating stack dynamic objects. */
2668 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2669 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2670 (current_function_outgoing_args_size \
2671 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2673 #else
2674 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2675 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2676 #endif
2678 #else
2679 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2680 #endif
2681 #endif
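/* For example (illustrative numbers only): with ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE of 16 and no OUTGOING_REG_PARM_STACK_SPACE, a
   function with a 64-byte outgoing argument block gets

	STACK_DYNAMIC_OFFSET (fndecl) == 64 + 16 + STACK_POINTER_OFFSET  */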
2683 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2684 its address taken. DECL is the decl for the object stored in the
2685 register, for later use if we do need to force REG into the stack.
2686 REG is overwritten by the MEM, as in put_reg_into_stack. */
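/* The resulting shape (illustrative, for an SImode DECL):

	(mem:SI (addressof:Pmode (reg NEW)))

   where the ADDRESSOF also records the original register number and
   DECL, so put_addressof_into_stack can still force a stack slot
   later if the ADDRESSOF cannot be purged.  */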
2688 rtx
2689 gen_mem_addressof (reg, decl)
2690 rtx reg;
2691 tree decl;
2693 tree type = TREE_TYPE (decl);
2695 rtx r = gen_rtx (ADDRESSOF, Pmode, gen_reg_rtx (GET_MODE (reg)));
2696 ADDRESSOF_REGNO (r) = REGNO (reg);
2697 SET_ADDRESSOF_DECL (r, decl);
2699 XEXP (reg, 0) = r;
2700 PUT_CODE (reg, MEM);
2701 PUT_MODE (reg, DECL_MODE (decl));
2702 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2703 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2705 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2706 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2708 return reg;
2711 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2713 void
2714 flush_addressof (decl)
2715 tree decl;
2717 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2718 && DECL_RTL (decl) != 0
2719 && GET_CODE (DECL_RTL (decl)) == MEM
2720 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2721 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2722 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2725 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2727 static void
2728 put_addressof_into_stack (r)
2729 rtx r;
2731 tree decl = ADDRESSOF_DECL (r);
2732 rtx reg = XEXP (r, 0);
2734 if (GET_CODE (reg) != REG)
2735 abort ();
2737 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2738 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2739 ADDRESSOF_REGNO (r),
2740 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2743 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2744 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2745 the stack. */
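/* The two interesting shapes handled below (illustrative):

	(addressof (mem X))	 -> X, the address of the slot itself;
	(mem (addressof (reg)))	 -> the register, via a SUBREG when the
				    modes differ, or spilled to the
				    stack when that is not possible.  */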
2747 static void
2748 purge_addressof_1 (loc, insn, force)
2749 rtx *loc;
2750 rtx insn;
2751 int force;
2753 rtx x;
2754 RTX_CODE code;
2755 int i, j;
2756 char *fmt;
2758 /* Re-start here to avoid recursion in common cases. */
2759 restart:
2761 x = *loc;
2762 if (x == 0)
2763 return;
2765 code = GET_CODE (x);
2767 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2769 rtx insns;
2770 /* We must create a copy of the rtx because it was created by
2771 overwriting a REG rtx which is always shared. */
2772 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2774 if (validate_change (insn, loc, sub, 0))
2775 return;
2777 start_sequence ();
2778 if (! validate_change (insn, loc,
2779 force_operand (sub, NULL_RTX),
2780 0))
2781 abort ();
2783 insns = get_insns ();
2784 end_sequence ();
2785 emit_insns_before (insns, insn);
2786 return;
2788 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2790 rtx sub = XEXP (XEXP (x, 0), 0);
2792 if (GET_CODE (sub) == MEM)
2793 sub = gen_rtx (MEM, GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2795 if (GET_CODE (sub) == REG && MEM_VOLATILE_P (x))
2797 put_addressof_into_stack (XEXP (x, 0));
2798 return;
2800 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2802 if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
2804 rtx sub2 = gen_rtx (SUBREG, GET_MODE (x), sub, 0);
2805 if (validate_change (insn, loc, sub2, 0))
2806 goto restart;
2809 else if (validate_change (insn, loc, sub, 0))
2810 goto restart;
2811 /* else give up and put it into the stack */
2813 else if (code == ADDRESSOF)
2815 put_addressof_into_stack (x);
2816 return;
2819 /* Scan all subexpressions. */
2820 fmt = GET_RTX_FORMAT (code);
2821 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2823 if (*fmt == 'e')
2824 purge_addressof_1 (&XEXP (x, i), insn, force);
2825 else if (*fmt == 'E')
2826 for (j = 0; j < XVECLEN (x, i); j++)
2827 purge_addressof_1 (&XVECEXP (x, i, j), insn, force);
2831 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2832 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2833 stack. */
2835 void
2836 purge_addressof (insns)
2837 rtx insns;
2839 rtx insn;
2840 for (insn = insns; insn; insn = NEXT_INSN (insn))
2841 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2842 || GET_CODE (insn) == CALL_INSN)
2844 purge_addressof_1 (&PATTERN (insn), insn,
2845 asm_noperands (PATTERN (insn)) > 0);
2846 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0);
2850 /* Pass through the INSNS of function FNDECL and convert virtual register
2851 references to hard register references. */
2853 void
2854 instantiate_virtual_regs (fndecl, insns)
2855 tree fndecl;
2856 rtx insns;
2858 rtx insn;
2859 int i;
2861 /* Compute the offsets to use for this function. */
2862 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2863 var_offset = STARTING_FRAME_OFFSET;
2864 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2865 out_arg_offset = STACK_POINTER_OFFSET;
2867 /* Scan all variables and parameters of this function. For each that is
2868 in memory, instantiate all virtual registers if the result is a valid
2869 address. If not, we do it later. That will handle most uses of virtual
2870 regs on many machines. */
2871 instantiate_decls (fndecl, 1);
2873 /* Initialize recognition, indicating that volatile is OK. */
2874 init_recog ();
2876 /* Scan through all the insns, instantiating every virtual register still
2877 present. */
2878 for (insn = insns; insn; insn = NEXT_INSN (insn))
2879 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2880 || GET_CODE (insn) == CALL_INSN)
2882 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2883 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2886 /* Instantiate the stack slots for the parm registers, for later use in
2887 addressof elimination. */
2888 for (i = 0; i < max_parm_reg; ++i)
2889 if (parm_reg_stack_loc[i])
2890 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
2892 /* Now instantiate the remaining register equivalences for debugging info.
2893 These will not be valid addresses. */
2894 instantiate_decls (fndecl, 0);
2896 /* Indicate that, from now on, assign_stack_local should use
2897 frame_pointer_rtx. */
2898 virtuals_instantiated = 1;
2901 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2902 all virtual registers in their DECL_RTL's.
2904 If VALID_ONLY, do this only if the resulting address is still valid.
2905 Otherwise, always do it. */
2907 static void
2908 instantiate_decls (fndecl, valid_only)
2909 tree fndecl;
2910 int valid_only;
2912 tree decl;
2914 if (DECL_SAVED_INSNS (fndecl))
2915 /* When compiling an inline function, the obstack used for
2916 rtl allocation is the maybepermanent_obstack. Calling
2917 `resume_temporary_allocation' switches us back to that
2918 obstack while we process this function's parameters. */
2919 resume_temporary_allocation ();
2921 /* Process all parameters of the function. */
2922 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2924 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
2926 instantiate_decl (DECL_RTL (decl), size, valid_only);
2928 /* If the parameter was promoted, then the incoming RTL mode may be
2929 larger than the declared type size. We must use the larger of
2930 the two sizes. */
2931 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
2932 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
2935 /* Now process all variables defined in the function or its subblocks. */
2936 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2938 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2940 /* Save all rtl allocated for this function by raising the
2941 high-water mark on the maybepermanent_obstack. */
2942 preserve_data ();
2943 /* All further rtl allocation is now done in the current_obstack. */
2944 rtl_in_current_obstack ();
2948 /* Subroutine of instantiate_decls: Process all decls in the given
2949 BLOCK node and all its subblocks. */
2951 static void
2952 instantiate_decls_1 (let, valid_only)
2953 tree let;
2954 int valid_only;
2956 tree t;
2958 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2959 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2960 valid_only);
2962 /* Process all subblocks. */
2963 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2964 instantiate_decls_1 (t, valid_only);
2967 /* Subroutine of the preceding procedures: Given RTL representing a
2968 decl and the size of the object, do any instantiation required.
2970 If VALID_ONLY is non-zero, it means that the RTL should only be
2971 changed if the new address is valid. */
2973 static void
2974 instantiate_decl (x, size, valid_only)
2975 rtx x;
2976 int size;
2977 int valid_only;
2979 enum machine_mode mode;
2980 rtx addr;
2982 /* If this is not a MEM, no need to do anything. Similarly if the
2983 address is a constant or a register that is not a virtual register. */
2985 if (x == 0 || GET_CODE (x) != MEM)
2986 return;
2988 addr = XEXP (x, 0);
2989 if (CONSTANT_P (addr)
2990 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
2991 || (GET_CODE (addr) == REG
2992 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2993 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2994 return;
2996 /* If we should only do this if the address is valid, copy the address.
2997 We need to do this so we can undo any changes that might make the
2998 address invalid. This copy is unfortunate, but probably can't be
2999 avoided. */
3001 if (valid_only)
3002 addr = copy_rtx (addr);
3004 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3006 if (valid_only)
3008 /* Now verify that the resulting address is valid for every integer or
3009 floating-point mode up to and including SIZE bytes long. We do this
3010 since the object might be accessed in any mode and frame addresses
3011 are shared. */
3013 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3014 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3015 mode = GET_MODE_WIDER_MODE (mode))
3016 if (! memory_address_p (mode, addr))
3017 return;
3019 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3020 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3021 mode = GET_MODE_WIDER_MODE (mode))
3022 if (! memory_address_p (mode, addr))
3023 return;
3026 /* Put back the address now that we have updated it and we either know
3027 it is valid or we don't care whether it is valid. */
3029 XEXP (x, 0) = addr;
3032 /* Given a pointer to a piece of rtx and an optional pointer to the
3033 containing object, instantiate any virtual registers present in it.
3035 If EXTRA_INSNS, we always do the replacement and generate
3036 any extra insns before OBJECT. If it is zero, we do nothing if the replacement
3037 is not valid.
3039 Return 1 if we either had nothing to do or if we were able to do the
3040 needed replacement. Return 0 otherwise; we only return zero if
3041 EXTRA_INSNS is zero.
3043 We first try some simple transformations to avoid the creation of extra
3044 pseudos. */
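/* A typical case (illustrative, supposing var_offset is 16): the PLUS
   handling below rewrites

	(mem:SI (plus:SI (reg virtual_stack_vars) (const_int 8)))

   in place as

	(mem:SI (plus:SI (reg frame_pointer) (const_int 24)))

   provided the result is a valid address for the insn at hand.  */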
3046 static int
3047 instantiate_virtual_regs_1 (loc, object, extra_insns)
3048 rtx *loc;
3049 rtx object;
3050 int extra_insns;
3052 rtx x;
3053 RTX_CODE code;
3054 rtx new = 0;
3055 HOST_WIDE_INT offset;
3056 rtx temp;
3057 rtx seq;
3058 int i, j;
3059 char *fmt;
3061 /* Re-start here to avoid recursion in common cases. */
3062 restart:
3064 x = *loc;
3065 if (x == 0)
3066 return 1;
3068 code = GET_CODE (x);
3070 /* Check for some special cases. */
3071 switch (code)
3073 case CONST_INT:
3074 case CONST_DOUBLE:
3075 case CONST:
3076 case SYMBOL_REF:
3077 case CODE_LABEL:
3078 case PC:
3079 case CC0:
3080 case ASM_INPUT:
3081 case ADDR_VEC:
3082 case ADDR_DIFF_VEC:
3083 case RETURN:
3084 return 1;
3086 case SET:
3087 /* We are allowed to set the virtual registers. This means that
3088 the actual register should receive the source minus the
3089 appropriate offset. This is used, for example, in the handling
3090 of non-local gotos. */
3091 if (SET_DEST (x) == virtual_incoming_args_rtx)
3092 new = arg_pointer_rtx, offset = - in_arg_offset;
3093 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3094 new = frame_pointer_rtx, offset = - var_offset;
3095 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3096 new = stack_pointer_rtx, offset = - dynamic_offset;
3097 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3098 new = stack_pointer_rtx, offset = - out_arg_offset;
3100 if (new)
3102 /* The only valid sources here are PLUS or REG. Just do
3103 the simplest possible thing to handle them. */
3104 if (GET_CODE (SET_SRC (x)) != REG
3105 && GET_CODE (SET_SRC (x)) != PLUS)
3106 abort ();
3108 start_sequence ();
3109 if (GET_CODE (SET_SRC (x)) != REG)
3110 temp = force_operand (SET_SRC (x), NULL_RTX);
3111 else
3112 temp = SET_SRC (x);
3113 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3114 seq = get_insns ();
3115 end_sequence ();
3117 emit_insns_before (seq, object);
3118 SET_DEST (x) = new;
3120 if (! validate_change (object, &SET_SRC (x), temp, 0)
3121 || ! extra_insns)
3122 abort ();
3124 return 1;
3127 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3128 loc = &SET_SRC (x);
3129 goto restart;
3131 case PLUS:
3132 /* Handle special case of virtual register plus constant. */
3133 if (CONSTANT_P (XEXP (x, 1)))
3135 rtx old, new_offset;
3137 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3138 if (GET_CODE (XEXP (x, 0)) == PLUS)
3140 rtx inner = XEXP (XEXP (x, 0), 0);
3142 if (inner == virtual_incoming_args_rtx)
3143 new = arg_pointer_rtx, offset = in_arg_offset;
3144 else if (inner == virtual_stack_vars_rtx)
3145 new = frame_pointer_rtx, offset = var_offset;
3146 else if (inner == virtual_stack_dynamic_rtx)
3147 new = stack_pointer_rtx, offset = dynamic_offset;
3148 else if (inner == virtual_outgoing_args_rtx)
3149 new = stack_pointer_rtx, offset = out_arg_offset;
3150 else
3152 loc = &XEXP (x, 0);
3153 goto restart;
3156 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3157 extra_insns);
3158 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
3161 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3162 new = arg_pointer_rtx, offset = in_arg_offset;
3163 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3164 new = frame_pointer_rtx, offset = var_offset;
3165 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3166 new = stack_pointer_rtx, offset = dynamic_offset;
3167 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3168 new = stack_pointer_rtx, offset = out_arg_offset;
3169 else
3171 /* We know the second operand is a constant. Unless the
3172 first operand is a REG (which has already been checked),
3173 it needs to be checked. */
3174 if (GET_CODE (XEXP (x, 0)) != REG)
3176 loc = &XEXP (x, 0);
3177 goto restart;
3179 return 1;
3182 new_offset = plus_constant (XEXP (x, 1), offset);
3184 /* If the new constant is zero, try to replace the sum with just
3185 the register. */
3186 if (new_offset == const0_rtx
3187 && validate_change (object, loc, new, 0))
3188 return 1;
3190 /* Next try to replace the register and new offset.
3191 There are two changes to validate here, and we can't assume that,
3192 when the old offset equals the new one, just changing the register
3193 will yield a valid insn. In the interests of a little efficiency,
3194 however, we only call validate change once (we don't queue up the
3195 changes and then call apply_change_group). */
3197 old = XEXP (x, 0);
3198 if (offset == 0
3199 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3200 : (XEXP (x, 0) = new,
3201 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3203 if (! extra_insns)
3205 XEXP (x, 0) = old;
3206 return 0;
3209 /* Otherwise copy the new constant into a register and replace
3210 the constant with that register. */
3211 temp = gen_reg_rtx (Pmode);
3212 XEXP (x, 0) = new;
3213 if (validate_change (object, &XEXP (x, 1), temp, 0))
3214 emit_insn_before (gen_move_insn (temp, new_offset), object);
3215 else
3217 /* If that didn't work, replace this expression with a
3218 register containing the sum. */
3220 XEXP (x, 0) = old;
3221 new = gen_rtx (PLUS, Pmode, new, new_offset);
3223 start_sequence ();
3224 temp = force_operand (new, NULL_RTX);
3225 seq = get_insns ();
3226 end_sequence ();
3228 emit_insns_before (seq, object);
3229 if (! validate_change (object, loc, temp, 0)
3230 && ! validate_replace_rtx (x, temp, object))
3231 abort ();
3235 return 1;
3238 /* Fall through to generic two-operand expression case. */
3239 case EXPR_LIST:
3240 case CALL:
3241 case COMPARE:
3242 case MINUS:
3243 case MULT:
3244 case DIV: case UDIV:
3245 case MOD: case UMOD:
3246 case AND: case IOR: case XOR:
3247 case ROTATERT: case ROTATE:
3248 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3249 case NE: case EQ:
3250 case GE: case GT: case GEU: case GTU:
3251 case LE: case LT: case LEU: case LTU:
3252 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3253 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3254 loc = &XEXP (x, 0);
3255 goto restart;
3257 case MEM:
3258 /* Most cases of MEM that convert to valid addresses have already been
3259 handled by our scan of decls. The only special handling we
3260 need here is to make a copy of the rtx to ensure it isn't being
3261 shared if we have to change it to a pseudo.
3263 If the rtx is a simple reference to an address via a virtual register,
3264 it can potentially be shared. In such cases, first try to make it
3265 a valid address, which can also be shared. Otherwise, copy it and
3266 proceed normally.
3268 First check for common cases that need no processing. These are
3269 usually due to instantiation already being done on a previous instance
3270 of a shared rtx. */
3272 temp = XEXP (x, 0);
3273 if (CONSTANT_ADDRESS_P (temp)
3274 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3275 || temp == arg_pointer_rtx
3276 #endif
3277 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3278 || temp == hard_frame_pointer_rtx
3279 #endif
3280 || temp == frame_pointer_rtx)
3281 return 1;
3283 if (GET_CODE (temp) == PLUS
3284 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3285 && (XEXP (temp, 0) == frame_pointer_rtx
3286 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3287 || XEXP (temp, 0) == hard_frame_pointer_rtx
3288 #endif
3289 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3290 || XEXP (temp, 0) == arg_pointer_rtx
3291 #endif
3292 ))
3293 return 1;
3295 if (temp == virtual_stack_vars_rtx
3296 || temp == virtual_incoming_args_rtx
3297 || (GET_CODE (temp) == PLUS
3298 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3299 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3300 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3302 /* This MEM may be shared. If the substitution can be done without
3303 the need to generate new pseudos, we want to do it in place
3304 so all copies of the shared rtx benefit. The call below will
3305 only make substitutions if the resulting address is still
3306 valid.
3308 Note that we cannot pass X as the object in the recursive call
3309 since the insn being processed may not allow all valid
3310 addresses. However, if we were not passed an object, we can
3311 only modify X without copying it if X will have a valid
3312 address.
3314 ??? Also note that this can still lose if OBJECT is an insn that
3315 has fewer restrictions on an address than some other insn.
3316 In that case, we will modify the shared address. This case
3317 doesn't seem very likely, though. One case where this could
3318 happen is in the case of a USE or CLOBBER reference, but we
3319 take care of that below. */
3321 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3322 object ? object : x, 0))
3323 return 1;
3325 /* Otherwise make a copy and process that copy. We copy the entire
3326 RTL expression since it might be a PLUS which could also be
3327 shared. */
3328 *loc = x = copy_rtx (x);
3331 /* Fall through to generic unary operation case. */
3332 case SUBREG:
3333 case STRICT_LOW_PART:
3334 case NEG: case NOT:
3335 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3336 case SIGN_EXTEND: case ZERO_EXTEND:
3337 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3338 case FLOAT: case FIX:
3339 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3340 case ABS:
3341 case SQRT:
3342 case FFS:
3343 /* These cases either have just one operand, or we know that we need not
3344 check the rest of the operands. */
3345 loc = &XEXP (x, 0);
3346 goto restart;
3348 case USE:
3349 case CLOBBER:
3350 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3351 go ahead and make the invalid one, but do it to a copy. For a REG,
3352 just make the recursive call, since there's no chance of a problem. */
3354 if ((GET_CODE (XEXP (x, 0)) == MEM
3355 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3356 0))
3357 || (GET_CODE (XEXP (x, 0)) == REG
3358 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3359 return 1;
3361 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3362 loc = &XEXP (x, 0);
3363 goto restart;
3365 case REG:
3366 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3367 in front of this insn and substitute the temporary. */
3368 if (x == virtual_incoming_args_rtx)
3369 new = arg_pointer_rtx, offset = in_arg_offset;
3370 else if (x == virtual_stack_vars_rtx)
3371 new = frame_pointer_rtx, offset = var_offset;
3372 else if (x == virtual_stack_dynamic_rtx)
3373 new = stack_pointer_rtx, offset = dynamic_offset;
3374 else if (x == virtual_outgoing_args_rtx)
3375 new = stack_pointer_rtx, offset = out_arg_offset;
3377 if (new)
3379 temp = plus_constant (new, offset);
3380 if (!validate_change (object, loc, temp, 0))
3382 if (! extra_insns)
3383 return 0;
3385 start_sequence ();
3386 temp = force_operand (temp, NULL_RTX);
3387 seq = get_insns ();
3388 end_sequence ();
3390 emit_insns_before (seq, object);
3391 if (! validate_change (object, loc, temp, 0)
3392 && ! validate_replace_rtx (x, temp, object))
3393 abort ();
3397 return 1;
3399 case ADDRESSOF:
3400 if (GET_CODE (XEXP (x, 0)) == REG)
3401 return 1;
3403 else if (GET_CODE (XEXP (x, 0)) == MEM)
3405 /* If we have an (addressof (mem ..)), do any instantiation inside
3406 since we know we'll be making the inside valid when we finally
3407 remove the ADDRESSOF. */
3408 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3409 return 1;
3411 break;
3413 default:
3414 break;
3417 /* Scan all subexpressions. */
3418 fmt = GET_RTX_FORMAT (code);
3419 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3420 if (*fmt == 'e')
3422 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3423 return 0;
3425 else if (*fmt == 'E')
3426 for (j = 0; j < XVECLEN (x, i); j++)
3427 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3428 extra_insns))
3429 return 0;
3431 return 1;
3434 /* Optimization: assuming this function does not receive nonlocal gotos,
3435 delete the handlers for such, as well as the insns to establish
3436 and disestablish them. */
3438 static void
3439 delete_handlers ()
3441 rtx insn;
3442 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3444 /* Delete the handler by turning off the flag that would
3445 prevent jump_optimize from deleting it.
3446 Also permit deletion of the nonlocal labels themselves
3447 if nothing local refers to them. */
3448 if (GET_CODE (insn) == CODE_LABEL)
3450 tree t, last_t;
3452 LABEL_PRESERVE_P (insn) = 0;
3454 /* Remove it from the nonlocal_label list, to avoid confusing
3455 flow. */
3456 for (t = nonlocal_labels, last_t = 0; t;
3457 last_t = t, t = TREE_CHAIN (t))
3458 if (DECL_RTL (TREE_VALUE (t)) == insn)
3459 break;
3460 if (t)
3462 if (! last_t)
3463 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3464 else
3465 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3468 if (GET_CODE (insn) == INSN
3469 && ((nonlocal_goto_handler_slot != 0
3470 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3471 || (nonlocal_goto_stack_level != 0
3472 && reg_mentioned_p (nonlocal_goto_stack_level,
3473 PATTERN (insn)))))
3474 delete_insn (insn);
3478 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3479 of the current function. */
3481 rtx
3482 nonlocal_label_rtx_list ()
3484 tree t;
3485 rtx x = 0;
3487 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3488 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3490 return x;
3493 /* Output a USE for any register use in RTL.
3494 This is used with -noreg to mark the extent of the lifespan
3495 of any registers used in a user-visible variable's DECL_RTL. */
3497 void
3498 use_variable (rtl)
3499 rtx rtl;
3501 if (GET_CODE (rtl) == REG)
3502 /* This is a register variable. */
3503 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3504 else if (GET_CODE (rtl) == MEM
3505 && GET_CODE (XEXP (rtl, 0)) == REG
3506 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3507 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3508 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3509 /* This is a variable-sized structure. */
3510 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3513 /* Like use_variable except that it outputs the USEs after INSN
3514 instead of at the end of the insn-chain. */
3516 void
3517 use_variable_after (rtl, insn)
3518 rtx rtl, insn;
3520 if (GET_CODE (rtl) == REG)
3521 /* This is a register variable. */
3522 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3523 else if (GET_CODE (rtl) == MEM
3524 && GET_CODE (XEXP (rtl, 0)) == REG
3525 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3526 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3527 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3528 /* This is a variable-sized structure. */
3529 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3532 int
3533 max_parm_reg_num ()
3535 return max_parm_reg;
3538 /* Return the first insn following those generated by `assign_parms'. */
3540 rtx
3541 get_first_nonparm_insn ()
3543 if (last_parm_insn)
3544 return NEXT_INSN (last_parm_insn);
3545 return get_insns ();
3548 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3549 Crash if there is none. */
3551 rtx
3552 get_first_block_beg ()
3554 register rtx searcher;
3555 register rtx insn = get_first_nonparm_insn ();
3557 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3558 if (GET_CODE (searcher) == NOTE
3559 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3560 return searcher;
3562 abort (); /* Invalid call to this function. (See comments above.) */
3563 return NULL_RTX;
3566 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3567 This means a type for which function calls must pass an address to the
3568 function or get an address back from the function.
3569 EXP may be a type node or an expression (whose type is tested). */
3571 int
3572 aggregate_value_p (exp)
3573 tree exp;
3575 int i, regno, nregs;
3576 rtx reg;
3577 tree type;
3578 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3579 type = exp;
3580 else
3581 type = TREE_TYPE (exp);
3583 if (RETURN_IN_MEMORY (type))
3584 return 1;
3585 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3586 and thus can't be returned in registers. */
3587 if (TREE_ADDRESSABLE (type))
3588 return 1;
3589 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3590 return 1;
3591 /* Make sure we have suitable call-clobbered regs to return
3592 the value in; if not, we must return it in memory. */
3593 reg = hard_function_value (type, 0);
3595 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3596 it is OK. */
3597 if (GET_CODE (reg) != REG)
3598 return 0;
3600 regno = REGNO (reg);
3601 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3602 for (i = 0; i < nregs; i++)
3603 if (! call_used_regs[regno + i])
3604 return 1;
3605 return 0;
3606 }
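/* Illustrative sketch (target-dependent): given

       struct pair { int a, b; };
       struct pair make_pair (void);
       int make_int (void);

   aggregate_value_p is typically 1 for make_pair (the caller passes a
   hidden return-slot address), e.g. under -fpcc-struct-return or when
   RETURN_IN_MEMORY says so, and 0 for make_int, whose value comes back
   in a register.  */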
3608 /* Assign RTL expressions to the function's parameters.
3609 This may involve copying them into registers and using
3610 those registers as the RTL for them.
3612 If SECOND_TIME is non-zero it means that this function is being
3613 called a second time. This is done by integrate.c when a function's
3614 compilation is deferred. We need to come back here in case the
3615 FUNCTION_ARG macro computes items needed for the rest of the compilation
3616 (such as changing which registers are fixed or caller-saved). But suppress
3617 writing any insns or setting DECL_RTL of anything in this case. */
3619 void
3620 assign_parms (fndecl, second_time)
3621 tree fndecl;
3622 int second_time;
3623 {
3624 register tree parm;
3625 register rtx entry_parm = 0;
3626 register rtx stack_parm = 0;
3627 CUMULATIVE_ARGS args_so_far;
3628 enum machine_mode promoted_mode, passed_mode;
3629 enum machine_mode nominal_mode, promoted_nominal_mode;
3630 int unsignedp;
3631 /* Total space needed so far for args on the stack,
3632 given as a constant and a tree-expression. */
3633 struct args_size stack_args_size;
3634 tree fntype = TREE_TYPE (fndecl);
3635 tree fnargs = DECL_ARGUMENTS (fndecl);
3636 /* This is used for the arg pointer when referring to stack args. */
3637 rtx internal_arg_pointer;
3638 /* This is a dummy PARM_DECL that we used for the function result if
3639 the function returns a structure. */
3640 tree function_result_decl = 0;
3641 int varargs_setup = 0;
3642 rtx conversion_insns = 0;
3644 /* Nonzero if the last arg is named `__builtin_va_alist',
3645 which is used on some machines for old-fashioned non-ANSI varargs.h;
3646 this should be stuck onto the stack as if it had arrived there. */
3647 int hide_last_arg
3648 = (current_function_varargs
3649 && fnargs
3650 && (parm = tree_last (fnargs)) != 0
3651 && DECL_NAME (parm)
3652 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3653 "__builtin_va_alist")));
3655 /* Nonzero if function takes extra anonymous args.
3656 This means the last named arg must be on the stack
3657 right before the anonymous ones. */
3658 int stdarg
3659 = (TYPE_ARG_TYPES (fntype) != 0
3660 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3661 != void_type_node));
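/* Illustrative examples of the STDARG test (sketch, not from the
   original sources):

       int f (int a, ...);   TYPE_ARG_TYPES ends in a non-void type: stdarg = 1
       int g (int a);        prototyped list ends in void_type_node:  stdarg = 0
       int h ();             unprototyped, TYPE_ARG_TYPES == 0:       stdarg = 0

   Only f has anonymous args following its last named arg.  */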
3663 current_function_stdarg = stdarg;
3665 /* If the reg that the virtual arg pointer will be translated into is
3666 not a fixed reg or is the stack pointer, make a copy of the virtual
3667 arg pointer, and address parms via the copy. The frame pointer is
3668 considered fixed even though it is not marked as such.
3670 The second time through, simply use ap to avoid generating rtx. */
3672 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3673 || ! (fixed_regs[ARG_POINTER_REGNUM]
3674 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3675 && ! second_time)
3676 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3677 else
3678 internal_arg_pointer = virtual_incoming_args_rtx;
3679 current_function_internal_arg_pointer = internal_arg_pointer;
3681 stack_args_size.constant = 0;
3682 stack_args_size.var = 0;
3684 /* If struct value address is treated as the first argument, make it so. */
3685 if (aggregate_value_p (DECL_RESULT (fndecl))
3686 && ! current_function_returns_pcc_struct
3687 && struct_value_incoming_rtx == 0)
3688 {
3689 tree type = build_pointer_type (TREE_TYPE (fntype));
3691 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3693 DECL_ARG_TYPE (function_result_decl) = type;
3694 TREE_CHAIN (function_result_decl) = fnargs;
3695 fnargs = function_result_decl;
3696 }
3698 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3699 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3700 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3702 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3703 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3704 #else
3705 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3706 #endif
3708 /* We haven't yet found an argument that we must push and pretend the
3709 caller did. */
3710 current_function_pretend_args_size = 0;
3712 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3713 {
3714 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3715 struct args_size stack_offset;
3716 struct args_size arg_size;
3717 int passed_pointer = 0;
3718 int did_conversion = 0;
3719 tree passed_type = DECL_ARG_TYPE (parm);
3720 tree nominal_type = TREE_TYPE (parm);
3722 /* Set LAST_NAMED if this is last named arg before some
3723 anonymous args. */
3724 int last_named = ((TREE_CHAIN (parm) == 0
3725 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3726 && (stdarg || current_function_varargs));
3727 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3728 most machines, if this is a varargs/stdarg function, then we treat
3729 the last named arg as if it were anonymous too. */
3730 #ifdef STRICT_ARGUMENT_NAMING
3731 int named_arg = 1;
3732 #else
3733 int named_arg = ! last_named;
3734 #endif
3736 if (TREE_TYPE (parm) == error_mark_node
3737 /* This can happen after weird syntax errors
3738 or if an enum type is defined among the parms. */
3739 || TREE_CODE (parm) != PARM_DECL
3740 || passed_type == NULL)
3741 {
3742 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3743 const0_rtx);
3744 TREE_USED (parm) = 1;
3745 continue;
3746 }
3748 /* For varargs.h function, save info about regs and stack space
3749 used by the individual args, not including the va_alist arg. */
3750 if (hide_last_arg && last_named)
3751 current_function_args_info = args_so_far;
3753 /* Find mode of arg as it is passed, and mode of arg
3754 as it should be during execution of this function. */
3755 passed_mode = TYPE_MODE (passed_type);
3756 nominal_mode = TYPE_MODE (nominal_type);
3758 /* If the parm's mode is VOID, its value doesn't matter;
3759 skip the usual handling, such as emit_move_insn, which could crash. */
3760 if (nominal_mode == VOIDmode)
3761 {
3762 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3763 continue;
3764 }
3766 /* If the parm is to be passed as a transparent union, use the
3767 type of the first field for the tests below. We have already
3768 verified that the modes are the same. */
3769 if (DECL_TRANSPARENT_UNION (parm)
3770 || TYPE_TRANSPARENT_UNION (passed_type))
3771 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3773 /* See if this arg was passed by invisible reference. It is if
3774 it is an object whose size depends on the contents of the
3775 object itself or if the machine requires these objects be passed
3776 that way. */
3778 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3779 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3780 || TREE_ADDRESSABLE (passed_type)
3781 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3782 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3783 passed_type, named_arg)
3784 #endif
3785 )
3786 {
3787 passed_type = nominal_type = build_pointer_type (passed_type);
3788 passed_pointer = 1;
3789 passed_mode = nominal_mode = Pmode;
3790 }
3792 promoted_mode = passed_mode;
3794 #ifdef PROMOTE_FUNCTION_ARGS
3795 /* Compute the mode in which the arg is actually extended to. */
3796 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3797 #endif
3799 /* Let machine desc say which reg (if any) the parm arrives in.
3800 0 means it arrives on the stack. */
3801 #ifdef FUNCTION_INCOMING_ARG
3802 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3803 passed_type, named_arg);
3804 #else
3805 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3806 passed_type, named_arg);
3807 #endif
3809 if (entry_parm == 0)
3810 promoted_mode = passed_mode;
3812 #ifdef SETUP_INCOMING_VARARGS
3813 /* If this is the last named parameter, do any required setup for
3814 varargs or stdargs. We need to know about the case of this being an
3815 addressable type, in which case we skip the registers it
3816 would have arrived in.
3818 For stdargs, LAST_NAMED will be set for two parameters, the one that
3819 is actually the last named, and the dummy parameter. We only
3820 want to do this action once.
3822 Also, indicate when RTL generation is to be suppressed. */
3823 if (last_named && !varargs_setup)
3824 {
3825 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3826 current_function_pretend_args_size,
3827 second_time);
3828 varargs_setup = 1;
3829 }
3830 #endif
3832 /* Determine parm's home in the stack,
3833 in case it arrives in the stack or we should pretend it did.
3835 Compute the stack position and rtx where the argument arrives
3836 and its size.
3838 There is one complexity here: If this was a parameter that would
3839 have been passed in registers, but wasn't only because it is
3840 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3841 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3842 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3843 0 as it was the previous time. */
3845 locate_and_pad_parm (promoted_mode, passed_type,
3846 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3847 1,
3848 #else
3849 #ifdef FUNCTION_INCOMING_ARG
3850 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3851 passed_type,
3852 (named_arg
3853 || varargs_setup)) != 0,
3854 #else
3855 FUNCTION_ARG (args_so_far, promoted_mode,
3856 passed_type,
3857 named_arg || varargs_setup) != 0,
3858 #endif
3859 #endif
3860 fndecl, &stack_args_size, &stack_offset, &arg_size);
3862 if (! second_time)
3863 {
3864 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3866 if (offset_rtx == const0_rtx)
3867 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3868 else
3869 stack_parm = gen_rtx (MEM, promoted_mode,
3870 gen_rtx (PLUS, Pmode,
3871 internal_arg_pointer, offset_rtx));
3873 /* If this is a memory ref that contains aggregate components,
3874 mark it as such for cse and loop optimize. Likewise if it
3875 is readonly. */
3876 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3877 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3878 }
3880 /* If this parameter was passed both in registers and in the stack,
3881 use the copy on the stack. */
3882 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3883 entry_parm = 0;
3885 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3886 /* If this parm was passed part in regs and part in memory,
3887 pretend it arrived entirely in memory
3888 by pushing the register-part onto the stack.
3890 In the special case of a DImode or DFmode that is split,
3891 we could put it together in a pseudoreg directly,
3892 but for now that's not worth bothering with. */
3894 if (entry_parm)
3895 {
3896 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3897 passed_type, named_arg);
3899 if (nregs > 0)
3900 {
3901 current_function_pretend_args_size
3902 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3903 / (PARM_BOUNDARY / BITS_PER_UNIT)
3904 * (PARM_BOUNDARY / BITS_PER_UNIT));
3906 if (! second_time)
3907 {
3908 /* Handle calls that pass values in multiple non-contiguous
3909 locations. The Irix 6 ABI has examples of this. */
3910 if (GET_CODE (entry_parm) == PARALLEL)
3911 emit_group_store (validize_mem (stack_parm),
3912 entry_parm);
3913 else
3914 move_block_from_reg (REGNO (entry_parm),
3915 validize_mem (stack_parm), nregs,
3916 int_size_in_bytes (TREE_TYPE (parm)));
3917 }
3918 entry_parm = stack_parm;
3919 }
3920 }
3921 #endif
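/* Worked example of the rounding above (assuming UNITS_PER_WORD == 4
   and PARM_BOUNDARY == 64, i.e. an 8-byte boundary): if nregs == 3,
   then nregs * UNITS_PER_WORD == 12 bytes, and
   (12 + 8 - 1) / 8 * 8 == 16 bytes of pretend args.  */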
3923 /* If we didn't decide this parm came in a register,
3924 by default it came on the stack. */
3925 if (entry_parm == 0)
3926 entry_parm = stack_parm;
3928 /* Record permanently how this parm was passed. */
3929 if (! second_time)
3930 DECL_INCOMING_RTL (parm) = entry_parm;
3932 /* If there is actually space on the stack for this parm,
3933 count it in stack_args_size; otherwise set stack_parm to 0
3934 to indicate there is no preallocated stack slot for the parm. */
3936 if (entry_parm == stack_parm
3937 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3938 /* On some machines, even if a parm value arrives in a register
3939 there is still an (uninitialized) stack slot allocated for it.
3941 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3942 whether this parameter already has a stack slot allocated,
3943 because an arg block exists only if current_function_args_size
3944 is larger than some threshold, and we haven't calculated that
3945 yet. So, for now, we just assume that stack slots never exist
3946 in this case. */
3947 || REG_PARM_STACK_SPACE (fndecl) > 0
3948 #endif
3949 )
3950 {
3951 stack_args_size.constant += arg_size.constant;
3952 if (arg_size.var)
3953 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3954 }
3955 else
3956 /* No stack slot was pushed for this parm. */
3957 stack_parm = 0;
3959 /* Update info on where next arg arrives in registers. */
3961 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3962 passed_type, named_arg);
3964 /* If this is our second time through, we are done with this parm. */
3965 if (second_time)
3966 continue;
3968 /* If we can't trust the parm stack slot to be aligned enough
3969 for its ultimate type, don't use that slot after entry.
3970 We'll make another stack slot, if we need one. */
3971 {
3972 int thisparm_boundary
3973 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3975 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3976 stack_parm = 0;
3977 }
3979 /* If parm was passed in memory, and we need to convert it on entry,
3980 don't store it back in that same slot. */
3981 if (entry_parm != 0
3982 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3983 stack_parm = 0;
3985 #if 0
3986 /* Now adjust STACK_PARM to the mode and precise location
3987 where this parameter should live during execution,
3988 if we discover that it must live in the stack during execution.
3989 To make debuggers happier on big-endian machines, we store
3990 the value in the last bytes of the space available. */
3992 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3993 && stack_parm != 0)
3994 {
3995 rtx offset_rtx;
3997 if (BYTES_BIG_ENDIAN
3998 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3999 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4000 - GET_MODE_SIZE (nominal_mode));
4002 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4003 if (offset_rtx == const0_rtx)
4004 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
4005 else
4006 stack_parm = gen_rtx (MEM, nominal_mode,
4007 gen_rtx (PLUS, Pmode,
4008 internal_arg_pointer, offset_rtx));
4010 if (flag_check_memory_usage)
4011 {
4012 push_to_sequence (conversion_insns);
4013 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4014 XEXP (stack_parm, 0), ptr_mode,
4015 GEN_INT (int_size_in_bytes
4016 (TREE_TYPE (parm))),
4017 TYPE_MODE (sizetype),
4018 GEN_INT (MEMORY_USE_RW),
4019 TYPE_MODE (integer_type_node));
4020 conversion_insns = get_insns ();
4021 end_sequence ();
4022 }
4023 /* If this is a memory ref that contains aggregate components,
4024 mark it as such for cse and loop optimize. */
4025 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4026 }
4027 #endif /* 0 */
4029 #ifdef STACK_REGS
4030 /* We need this "use" info, because the gcc-register->stack-register
4031 converter in reg-stack.c needs to know which registers are active
4032 at the start of the function call. The actual parameter loading
4033 instructions are no longer always available by then, since they might
4034 have been optimized away. */
4036 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4037 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
4038 #endif
4040 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4041 in the mode in which it arrives.
4042 STACK_PARM is an RTX for a stack slot where the parameter can live
4043 during the function (in case we want to put it there).
4044 STACK_PARM is 0 if no stack slot was pushed for it.
4046 Now output code if necessary to convert ENTRY_PARM to
4047 the type in which this function declares it,
4048 and store that result in an appropriate place,
4049 which may be a pseudo reg, may be STACK_PARM,
4050 or may be a local stack slot if STACK_PARM is 0.
4052 Set DECL_RTL to that place. */
4054 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4055 {
4056 /* If a BLKmode arrives in registers, copy it to a stack slot.
4057 Handle calls that pass values in multiple non-contiguous
4058 locations. The Irix 6 ABI has examples of this. */
4059 if (GET_CODE (entry_parm) == REG
4060 || GET_CODE (entry_parm) == PARALLEL)
4061 {
4062 int size_stored
4063 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4064 UNITS_PER_WORD);
4066 /* Note that we will be storing an integral number of words.
4067 So we have to be careful to ensure that we allocate an
4068 integral number of words. We do this below in the
4069 assign_stack_local if space was not allocated in the argument
4070 list. If it was, this will not work if PARM_BOUNDARY is not
4071 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4072 if it becomes a problem. */
4074 if (stack_parm == 0)
4075 {
4076 stack_parm
4077 = assign_stack_local (GET_MODE (entry_parm),
4078 size_stored, 0);
4080 /* If this is a memory ref that contains aggregate
4081 components, mark it as such for cse and loop optimize. */
4082 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4083 }
4085 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4086 abort ();
4088 if (TREE_READONLY (parm))
4089 RTX_UNCHANGING_P (stack_parm) = 1;
4091 /* Handle calls that pass values in multiple non-contiguous
4092 locations. The Irix 6 ABI has examples of this. */
4093 if (GET_CODE (entry_parm) == PARALLEL)
4094 emit_group_store (validize_mem (stack_parm), entry_parm);
4095 else
4096 move_block_from_reg (REGNO (entry_parm),
4097 validize_mem (stack_parm),
4098 size_stored / UNITS_PER_WORD,
4099 int_size_in_bytes (TREE_TYPE (parm)));
4100 }
4101 DECL_RTL (parm) = stack_parm;
4102 }
4103 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4104 && ! DECL_INLINE (fndecl))
4105 /* layout_decl may set this. */
4106 || TREE_ADDRESSABLE (parm)
4107 || TREE_SIDE_EFFECTS (parm)
4108 /* If -ffloat-store specified, don't put explicit
4109 float variables into registers. */
4110 || (flag_float_store
4111 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4112 /* Always assign pseudo to structure return or item passed
4113 by invisible reference. */
4114 || passed_pointer || parm == function_result_decl)
4115 {
4116 /* Store the parm in a pseudoregister during the function, but we
4117 may need to do it in a wider mode. */
4119 register rtx parmreg;
4120 int regno, regnoi, regnor;
4122 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4124 promoted_nominal_mode
4125 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4127 parmreg = gen_reg_rtx (promoted_nominal_mode);
4128 mark_user_reg (parmreg);
4130 /* If this was an item that we received a pointer to, set DECL_RTL
4131 appropriately. */
4132 if (passed_pointer)
4133 {
4134 DECL_RTL (parm)
4135 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4136 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4137 }
4138 else
4139 DECL_RTL (parm) = parmreg;
4141 /* Copy the value into the register. */
4142 if (nominal_mode != passed_mode
4143 || promoted_nominal_mode != promoted_mode)
4144 {
4145 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4146 mode, by the caller. We now have to convert it to
4147 NOMINAL_MODE, if different. However, PARMREG may be in
4148 a different mode than NOMINAL_MODE if it is being stored
4149 promoted.
4151 If ENTRY_PARM is a hard register, it might be in a register
4152 not valid for operating in its mode (e.g., an odd-numbered
4153 register for a DFmode). In that case, moves are the only
4154 thing valid, so we can't do a convert from there. This
4155 occurs when the calling sequence allows such misaligned
4156 usages.
4158 In addition, the conversion may involve a call, which could
4159 clobber parameters which haven't been copied to pseudo
4160 registers yet. Therefore, we must first copy the parm to
4161 a pseudo reg here, and save the conversion until after all
4162 parameters have been moved. */
4164 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4166 emit_move_insn (tempreg, validize_mem (entry_parm));
4168 push_to_sequence (conversion_insns);
4169 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4171 expand_assignment (parm,
4172 make_tree (nominal_type, tempreg), 0, 0);
4173 conversion_insns = get_insns ();
4174 did_conversion = 1;
4175 end_sequence ();
4176 }
4177 else
4178 emit_move_insn (parmreg, validize_mem (entry_parm));
4180 /* If we were passed a pointer but the actual value
4181 can safely live in a register, put it in one. */
4182 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4183 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4184 && ! DECL_INLINE (fndecl))
4185 /* layout_decl may set this. */
4186 || TREE_ADDRESSABLE (parm)
4187 || TREE_SIDE_EFFECTS (parm)
4188 /* If -ffloat-store specified, don't put explicit
4189 float variables into registers. */
4190 || (flag_float_store
4191 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4192 {
4193 /* We can't use nominal_mode, because it will have been set to
4194 Pmode above. We must use the actual mode of the parm. */
4195 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4196 mark_user_reg (parmreg);
4197 emit_move_insn (parmreg, DECL_RTL (parm));
4198 DECL_RTL (parm) = parmreg;
4199 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4200 now the parm. */
4201 stack_parm = 0;
4202 }
4203 #ifdef FUNCTION_ARG_CALLEE_COPIES
4204 /* If we are passed an arg by reference and it is our responsibility
4205 to make a copy, do it now.
4206 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4207 original argument, so we must recreate them in the call to
4208 FUNCTION_ARG_CALLEE_COPIES. */
4209 /* ??? Later add code to skip the copy when the argument isn't
4210 modified. */
4212 else if (passed_pointer
4213 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4214 TYPE_MODE (DECL_ARG_TYPE (parm)),
4215 DECL_ARG_TYPE (parm),
4216 named_arg)
4217 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4218 {
4219 rtx copy;
4220 tree type = DECL_ARG_TYPE (parm);
4222 /* This sequence may involve a library call perhaps clobbering
4223 registers that haven't been copied to pseudos yet. */
4225 push_to_sequence (conversion_insns);
4227 if (TYPE_SIZE (type) == 0
4228 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4229 /* This is a variable sized object. */
4230 copy = gen_rtx (MEM, BLKmode,
4231 allocate_dynamic_stack_space
4232 (expr_size (parm), NULL_RTX,
4233 TYPE_ALIGN (type)));
4234 else
4235 copy = assign_stack_temp (TYPE_MODE (type),
4236 int_size_in_bytes (type), 1);
4237 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4238 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4240 store_expr (parm, copy, 0);
4241 emit_move_insn (parmreg, XEXP (copy, 0));
4242 if (flag_check_memory_usage)
4243 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4244 XEXP (copy, 0), ptr_mode,
4245 GEN_INT (int_size_in_bytes (type)),
4246 TYPE_MODE (sizetype),
4247 GEN_INT (MEMORY_USE_RW),
4248 TYPE_MODE (integer_type_node));
4249 conversion_insns = get_insns ();
4250 did_conversion = 1;
4251 end_sequence ();
4252 }
4253 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4255 /* In any case, record the parm's desired stack location
4256 in case we later discover it must live in the stack.
4258 If it is a COMPLEX value, store the stack location for both
4259 halves. */
4261 if (GET_CODE (parmreg) == CONCAT)
4262 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4263 else
4264 regno = REGNO (parmreg);
4266 if (regno >= max_parm_reg)
4267 {
4268 rtx *new;
4269 int old_max_parm_reg = max_parm_reg;
4271 /* It's slow to expand this one register at a time,
4272 but it's also rare and we need max_parm_reg to be
4273 precisely correct. */
4274 max_parm_reg = regno + 1;
4275 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4276 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4277 old_max_parm_reg * sizeof (rtx));
4278 bzero ((char *) (new + old_max_parm_reg),
4279 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4280 parm_reg_stack_loc = new;
4281 }
4283 if (GET_CODE (parmreg) == CONCAT)
4284 {
4285 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4287 regnor = REGNO (gen_realpart (submode, parmreg));
4288 regnoi = REGNO (gen_imagpart (submode, parmreg));
4290 if (stack_parm != 0)
4291 {
4292 parm_reg_stack_loc[regnor]
4293 = gen_realpart (submode, stack_parm);
4294 parm_reg_stack_loc[regnoi]
4295 = gen_imagpart (submode, stack_parm);
4296 }
4297 else
4298 {
4299 parm_reg_stack_loc[regnor] = 0;
4300 parm_reg_stack_loc[regnoi] = 0;
4301 }
4302 }
4303 else
4304 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4306 /* Mark the register as eliminable if we did no conversion
4307 and it was copied from memory at a fixed offset,
4308 and the arg pointer was not copied to a pseudo-reg.
4309 If the arg pointer is a pseudo reg or the offset formed
4310 an invalid address, such memory-equivalences
4311 as we make here would screw up life analysis for it. */
4312 if (nominal_mode == passed_mode
4313 && ! did_conversion
4314 && stack_parm != 0
4315 && GET_CODE (stack_parm) == MEM
4316 && stack_offset.var == 0
4317 && reg_mentioned_p (virtual_incoming_args_rtx,
4318 XEXP (stack_parm, 0)))
4319 {
4320 rtx linsn = get_last_insn ();
4321 rtx sinsn, set;
4323 /* Mark complex types separately. */
4324 if (GET_CODE (parmreg) == CONCAT)
4325 /* Scan backwards for the set of the real and
4326 imaginary parts. */
4327 for (sinsn = linsn; sinsn != 0;
4328 sinsn = prev_nonnote_insn (sinsn))
4329 {
4330 set = single_set (sinsn);
4331 if (set != 0
4332 && SET_DEST (set) == regno_reg_rtx [regnoi])
4333 REG_NOTES (sinsn)
4334 = gen_rtx (EXPR_LIST, REG_EQUIV,
4335 parm_reg_stack_loc[regnoi],
4336 REG_NOTES (sinsn));
4337 else if (set != 0
4338 && SET_DEST (set) == regno_reg_rtx [regnor])
4339 REG_NOTES (sinsn)
4340 = gen_rtx (EXPR_LIST, REG_EQUIV,
4341 parm_reg_stack_loc[regnor],
4342 REG_NOTES (sinsn));
4343 }
4344 else if ((set = single_set (linsn)) != 0
4345 && SET_DEST (set) == parmreg)
4346 REG_NOTES (linsn)
4347 = gen_rtx (EXPR_LIST, REG_EQUIV,
4348 stack_parm, REG_NOTES (linsn));
4349 }
4351 /* For pointer data type, suggest pointer register. */
4352 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4353 mark_reg_pointer (parmreg,
4354 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4355 / BITS_PER_UNIT));
4356 }
4357 else
4358 {
4359 /* Value must be stored in the stack slot STACK_PARM
4360 during function execution. */
4362 if (promoted_mode != nominal_mode)
4363 {
4364 /* Conversion is required. */
4365 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4367 emit_move_insn (tempreg, validize_mem (entry_parm));
4369 push_to_sequence (conversion_insns);
4370 entry_parm = convert_to_mode (nominal_mode, tempreg,
4371 TREE_UNSIGNED (TREE_TYPE (parm)));
4372 conversion_insns = get_insns ();
4373 did_conversion = 1;
4374 end_sequence ();
4375 }
4377 if (entry_parm != stack_parm)
4378 {
4379 if (stack_parm == 0)
4380 {
4381 stack_parm
4382 = assign_stack_local (GET_MODE (entry_parm),
4383 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4384 /* If this is a memory ref that contains aggregate components,
4385 mark it as such for cse and loop optimize. */
4386 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4387 }
4389 if (promoted_mode != nominal_mode)
4390 {
4391 push_to_sequence (conversion_insns);
4392 emit_move_insn (validize_mem (stack_parm),
4393 validize_mem (entry_parm));
4394 conversion_insns = get_insns ();
4395 end_sequence ();
4396 }
4397 else
4398 emit_move_insn (validize_mem (stack_parm),
4399 validize_mem (entry_parm));
4401 if (flag_check_memory_usage)
4402 {
4403 push_to_sequence (conversion_insns);
4404 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4405 XEXP (stack_parm, 0), ptr_mode,
4406 GEN_INT (GET_MODE_SIZE (GET_MODE
4407 (entry_parm))),
4408 TYPE_MODE (sizetype),
4409 GEN_INT (MEMORY_USE_RW),
4410 TYPE_MODE (integer_type_node));
4412 conversion_insns = get_insns ();
4413 end_sequence ();
4414 }
4415 DECL_RTL (parm) = stack_parm;
4416 }
4417 }
4418 /* If this "parameter" was the place where we are receiving the
4419 function's incoming structure pointer, set up the result. */
4420 if (parm == function_result_decl)
4421 {
4422 tree result = DECL_RESULT (fndecl);
4423 tree restype = TREE_TYPE (result);
4425 DECL_RTL (result)
4426 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
4428 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4429 }
4431 if (TREE_THIS_VOLATILE (parm))
4432 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4433 if (TREE_READONLY (parm))
4434 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4435 }
4437 /* Output all parameter conversion instructions (possibly including calls)
4438 now that all parameters have been copied out of hard registers. */
4439 emit_insns (conversion_insns);
4441 last_parm_insn = get_last_insn ();
4443 current_function_args_size = stack_args_size.constant;
4445 /* Adjust function incoming argument size for alignment and
4446 minimum length. */
4448 #ifdef REG_PARM_STACK_SPACE
4449 #ifndef MAYBE_REG_PARM_STACK_SPACE
4450 current_function_args_size = MAX (current_function_args_size,
4451 REG_PARM_STACK_SPACE (fndecl));
4452 #endif
4453 #endif
4455 #ifdef STACK_BOUNDARY
4456 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4458 current_function_args_size
4459 = ((current_function_args_size + STACK_BYTES - 1)
4460 / STACK_BYTES) * STACK_BYTES;
4461 #endif
4463 #ifdef ARGS_GROW_DOWNWARD
4464 current_function_arg_offset_rtx
4465 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4466 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4467 size_int (-stack_args_size.constant)),
4468 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4469 #else
4470 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4471 #endif
4473 /* See how many bytes, if any, of its args a function should try to pop
4474 on return. */
4476 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4477 current_function_args_size);
4479 /* For stdarg.h function, save info about
4480 regs and stack space used by the named args. */
4482 if (!hide_last_arg)
4483 current_function_args_info = args_so_far;
4485 /* Set the rtx used for the function return value. Put this in its
4486 own variable so any optimizers that need this information don't have
4487 to include tree.h. Do this here so it gets done when an inlined
4488 function gets output. */
4490 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4491 }
4493 /* Indicate whether REGNO is an incoming argument to the current function
4494 that was promoted to a wider mode. If so, return the RTX for the
4495 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4496 that REGNO is promoted from and whether the promotion was signed or
4497 unsigned. */
4499 #ifdef PROMOTE_FUNCTION_ARGS
4501 rtx
4502 promoted_input_arg (regno, pmode, punsignedp)
4503 int regno;
4504 enum machine_mode *pmode;
4505 int *punsignedp;
4506 {
4507 tree arg;
4509 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4510 arg = TREE_CHAIN (arg))
4511 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4512 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4513 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4514 {
4515 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4516 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4518 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4519 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4520 && mode != DECL_MODE (arg))
4521 {
4522 *pmode = DECL_MODE (arg);
4523 *punsignedp = unsignedp;
4524 return DECL_INCOMING_RTL (arg);
4525 }
4526 }
4528 return 0;
4529 }
4531 #endif
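/* Illustrative sketch (machine-dependent): on a target whose
   PROMOTE_FUNCTION_ARGS widens sub-word scalars, a parm declared

       void f (short s);

   arrives sign-extended in a full-word (SImode) register.  For that
   register, promoted_input_arg returns the incoming REG and sets
   *pmode to HImode and *punsignedp to 0.  */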
4533 /* Compute the size and offset from the start of the stacked arguments for a
4534 parm passed in mode PASSED_MODE and with type TYPE.
4536 INITIAL_OFFSET_PTR points to the current offset into the stacked
4537 arguments.
4539 The starting offset and size for this parm are returned in *OFFSET_PTR
4540 and *ARG_SIZE_PTR, respectively.
4542 IN_REGS is non-zero if the argument will be passed in registers. It will
4543 never be set if REG_PARM_STACK_SPACE is not defined.
4545 FNDECL is the function in which the argument was defined.
4547 There are two types of rounding that are done. The first, controlled by
4548 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4549 list to be aligned to the specific boundary (in bits). This rounding
4550 affects the initial and starting offsets, but not the argument size.
4552 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4553 optionally rounds the size of the parm to PARM_BOUNDARY. The
4554 initial offset is not affected by this rounding, while the size always
4555 is and the starting offset may be. */
4557 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4558 initial_offset_ptr is positive because locate_and_pad_parm's
4559 callers pass in the total size of args so far as
4560 initial_offset_ptr. arg_size_ptr is always positive.  */
4562 void
4563 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4564 initial_offset_ptr, offset_ptr, arg_size_ptr)
4565 enum machine_mode passed_mode;
4566 tree type;
4567 int in_regs;
4568 tree fndecl;
4569 struct args_size *initial_offset_ptr;
4570 struct args_size *offset_ptr;
4571 struct args_size *arg_size_ptr;
4572 {
4573 tree sizetree
4574 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4575 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4576 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4577 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4578 int reg_parm_stack_space = 0;
4580 #ifdef REG_PARM_STACK_SPACE
4581 /* If we have found a stack parm before we reach the end of the
4582 area reserved for registers, skip that area. */
4583 if (! in_regs)
4584 {
4585 #ifdef MAYBE_REG_PARM_STACK_SPACE
4586 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4587 #else
4588 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4589 #endif
4590 if (reg_parm_stack_space > 0)
4591 {
4592 if (initial_offset_ptr->var)
4593 {
4594 initial_offset_ptr->var
4595 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4596 size_int (reg_parm_stack_space));
4597 initial_offset_ptr->constant = 0;
4598 }
4599 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4600 initial_offset_ptr->constant = reg_parm_stack_space;
4601 }
4602 }
4603 #endif /* REG_PARM_STACK_SPACE */
4605 arg_size_ptr->var = 0;
4606 arg_size_ptr->constant = 0;
4608 #ifdef ARGS_GROW_DOWNWARD
4609 if (initial_offset_ptr->var)
4610 {
4611 offset_ptr->constant = 0;
4612 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4613 initial_offset_ptr->var);
4614 }
4615 else
4616 {
4617 offset_ptr->constant = - initial_offset_ptr->constant;
4618 offset_ptr->var = 0;
4619 }
4620 if (where_pad != none
4621 && (TREE_CODE (sizetree) != INTEGER_CST
4622 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4623 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4624 SUB_PARM_SIZE (*offset_ptr, sizetree);
4625 if (where_pad != downward)
4626 pad_to_arg_alignment (offset_ptr, boundary);
4627 if (initial_offset_ptr->var)
4628 {
4629 arg_size_ptr->var = size_binop (MINUS_EXPR,
4630 size_binop (MINUS_EXPR,
4631 integer_zero_node,
4632 initial_offset_ptr->var),
4633 offset_ptr->var);
4634 }
4635 else
4636 {
4637 arg_size_ptr->constant = (- initial_offset_ptr->constant
4638 - offset_ptr->constant);
4639 }
4640 #else /* !ARGS_GROW_DOWNWARD */
4641 pad_to_arg_alignment (initial_offset_ptr, boundary);
4642 *offset_ptr = *initial_offset_ptr;
4644 #ifdef PUSH_ROUNDING
4645 if (passed_mode != BLKmode)
4646 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4647 #endif
4649 /* Pad_below needs the pre-rounded size to know how much to pad below
4650 so this must be done before rounding up. */
4651 if (where_pad == downward
4652 /* However, BLKmode args passed in regs have their padding done elsewhere.
4653 The stack slot must be able to hold the entire register. */
4654 && !(in_regs && passed_mode == BLKmode))
4655 pad_below (offset_ptr, passed_mode, sizetree);
4657 if (where_pad != none
4658 && (TREE_CODE (sizetree) != INTEGER_CST
4659 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4660 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4662 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4663 #endif /* ARGS_GROW_DOWNWARD */
4664 }
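/* Worked example (args growing upward; assuming PARM_BOUNDARY == 32
   and a FUNCTION_ARG_BOUNDARY of 64 for this parm): with 4 bytes of
   args already placed, the initial offset 4 is first rounded up to
   the 8-byte boundary, so offset_ptr->constant becomes 8; a 4-byte
   parm then occupies bytes 8..11 and arg_size_ptr->constant is 4.  */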
4666 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4667 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4669 static void
4670 pad_to_arg_alignment (offset_ptr, boundary)
4671 struct args_size *offset_ptr;
4672 int boundary;
4673 {
4674 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4676 if (boundary > BITS_PER_UNIT)
4677 {
4678 if (offset_ptr->var)
4679 {
4680 offset_ptr->var =
4681 #ifdef ARGS_GROW_DOWNWARD
4682 round_down
4683 #else
4684 round_up
4685 #endif
4686 (ARGS_SIZE_TREE (*offset_ptr),
4687 boundary / BITS_PER_UNIT);
4688 offset_ptr->constant = 0; /*?*/
4689 }
4690 else
4691 offset_ptr->constant =
4692 #ifdef ARGS_GROW_DOWNWARD
4693 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4694 #else
4695 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4696 #endif
4697 }
4698 }
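/* Worked example: with BOUNDARY == 64 bits (boundary_in_bytes == 8)
   and a constant offset of 12, the offset becomes
   CEIL_ROUND (12, 8) == 16 when args grow upward, or
   FLOOR_ROUND (12, 8) == 8 under ARGS_GROW_DOWNWARD.  */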
4700 static void
4701 pad_below (offset_ptr, passed_mode, sizetree)
4702 struct args_size *offset_ptr;
4703 enum machine_mode passed_mode;
4704 tree sizetree;
4705 {
4706 if (passed_mode != BLKmode)
4707 {
4708 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4709 offset_ptr->constant
4710 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4711 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4712 - GET_MODE_SIZE (passed_mode));
4713 }
4714 else
4715 {
4716 if (TREE_CODE (sizetree) != INTEGER_CST
4717 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4718 {
4719 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4720 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4721 /* Add it in. */
4722 ADD_PARM_SIZE (*offset_ptr, s2);
4723 SUB_PARM_SIZE (*offset_ptr, sizetree);
4724 }
4725 }
4726 }
4728 static tree
4729 round_down (value, divisor)
4730 tree value;
4731 int divisor;
4733 return size_binop (MULT_EXPR,
4734 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4735 size_int (divisor));
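/* Worked example: round_down (size_int (19), 8) builds the tree for
   (19 / 8) * 8 == 16; FLOOR_DIV_EXPR guarantees the result never
   exceeds the input.  */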
4738 /* Walk the tree of blocks describing the binding levels within a function
4739 and warn about uninitialized variables.
4740 This is done after calling flow_analysis and before global_alloc
4741 clobbers the pseudo-regs to hard regs. */
4743 void
4744 uninitialized_vars_warning (block)
4745 tree block;
4746 {
4747 register tree decl, sub;
4748 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4749 {
4750 if (TREE_CODE (decl) == VAR_DECL
4751 /* These warnings are unreliable for aggregates
4752 because assigning the fields one by one can fail to convince
4753 flow.c that the entire aggregate was initialized.
4754 Unions are troublesome because members may be shorter. */
4755 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4756 && DECL_RTL (decl) != 0
4757 && GET_CODE (DECL_RTL (decl)) == REG
4758 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4759 warning_with_decl (decl,
4760 "`%s' might be used uninitialized in this function");
4761 if (TREE_CODE (decl) == VAR_DECL
4762 && DECL_RTL (decl) != 0
4763 && GET_CODE (DECL_RTL (decl)) == REG
4764 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4765 warning_with_decl (decl,
4766 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4768 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4769 uninitialized_vars_warning (sub);
4770 }
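/* Illustrative source (sketch) that this pass warns about:

       int f (int c)
       {
         int x;
         if (c)
           x = 1;
         return x;
       }

   x may reach the return uninitialized; whether the warning fires
   depends on flow analysis, which is why this runs after it.  */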
4772 /* Do the appropriate part of uninitialized_vars_warning
4773 but for arguments instead of local variables. */
4775 void
4776 setjmp_args_warning ()
4777 {
4778 register tree decl;
4779 for (decl = DECL_ARGUMENTS (current_function_decl);
4780 decl; decl = TREE_CHAIN (decl))
4781 if (DECL_RTL (decl) != 0
4782 && GET_CODE (DECL_RTL (decl)) == REG
4783 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4784 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4785 }
4787 /* If this function calls setjmp, put all vars into the stack
4788 unless they were declared `register'. */
4790 void
4791 setjmp_protect (block)
4792 tree block;
4793 {
4794 register tree decl, sub;
4795 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4796 if ((TREE_CODE (decl) == VAR_DECL
4797 || TREE_CODE (decl) == PARM_DECL)
4798 && DECL_RTL (decl) != 0
4799 && (GET_CODE (DECL_RTL (decl)) == REG
4800 || (GET_CODE (DECL_RTL (decl)) == MEM
4801 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4802 /* If this variable came from an inline function, it must be
4803 that its life doesn't overlap the setjmp. If there was a
4804 setjmp in the function, it would already be in memory. We
4805 must exclude such variables because their DECL_RTL might be
4806 set to strange things such as virtual_stack_vars_rtx. */
4807 && ! DECL_FROM_INLINE (decl)
4808 && (
4809 #ifdef NON_SAVING_SETJMP
4810 /* If longjmp doesn't restore the registers,
4811 don't put anything in them. */
4812 NON_SAVING_SETJMP
4813 ||
4814 #endif
4815 ! DECL_REGISTER (decl)))
4816 put_var_into_stack (decl);
4817 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4818 setjmp_protect (sub);
4819 }
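/* Illustrative sketch of why this matters:

       jmp_buf env;
       int f (void)
       {
         int v = 1;
         if (setjmp (env))
           return v;
         ...
       }

   If v lived in a call-clobbered register, a longjmp back into f could
   see a stale value; forcing such variables into the stack keeps them
   valid across the setjmp.  */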
4821 /* Like the previous function, but for args instead of local variables. */
4823 void
4824 setjmp_protect_args ()
4825 {
4826 register tree decl, sub;
4827 for (decl = DECL_ARGUMENTS (current_function_decl);
4828 decl; decl = TREE_CHAIN (decl))
4829 if ((TREE_CODE (decl) == VAR_DECL
4830 || TREE_CODE (decl) == PARM_DECL)
4831 && DECL_RTL (decl) != 0
4832 && (GET_CODE (DECL_RTL (decl)) == REG
4833 || (GET_CODE (DECL_RTL (decl)) == MEM
4834 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4835 && (
4836 /* If longjmp doesn't restore the registers,
4837 don't put anything in them. */
4838 #ifdef NON_SAVING_SETJMP
4839 NON_SAVING_SETJMP
4840 ||
4841 #endif
4842 ! DECL_REGISTER (decl)))
4843 put_var_into_stack (decl);
4844 }
4846 /* Return the context-pointer register corresponding to DECL,
4847 or 0 if it does not need one. */
4849 rtx
4850 lookup_static_chain (decl)
4851 tree decl;
4852 {
4853 tree context = decl_function_context (decl);
4854 tree link;
4856 if (context == 0
4857 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4858 return 0;
4860 /* We treat inline_function_decl as an alias for the current function
4861 because that is the inline function whose vars, types, etc.
4862 are being merged into the current function.
4863 See expand_inline_function. */
4864 if (context == current_function_decl || context == inline_function_decl)
4865 return virtual_stack_vars_rtx;
4867 for (link = context_display; link; link = TREE_CHAIN (link))
4868 if (TREE_PURPOSE (link) == context)
4869 return RTL_EXPR_RTL (TREE_VALUE (link));
4871 abort ();
4872 }
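/* Illustrative sketch (GNU C nested functions):

       int outer (int x)
       {
         int inner (void) { return x; }
         return inner ();
       }

   While inner is being compiled, a reference to x needs outer's frame;
   lookup_static_chain supplies the base (the static chain) through
   which that frame is addressed.  */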
4874 /* Convert a stack slot address ADDR for variable VAR
4875 (from a containing function)
4876 into an address valid in this function (using a static chain). */
4878 rtx
4879 fix_lexical_addr (addr, var)
4880 rtx addr;
4881 tree var;
4882 {
4883 rtx basereg;
4884 HOST_WIDE_INT displacement;
4885 tree context = decl_function_context (var);
4886 struct function *fp;
4887 rtx base = 0;
4889 /* If this is the present function, we need not do anything. */
4890 if (context == current_function_decl || context == inline_function_decl)
4891 return addr;
4893 for (fp = outer_function_chain; fp; fp = fp->next)
4894 if (fp->decl == context)
4895 break;
4897 if (fp == 0)
4898 abort ();
4900 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
4901 addr = XEXP (XEXP (addr, 0), 0);
4903 /* Decode given address as base reg plus displacement. */
4904 if (GET_CODE (addr) == REG)
4905 basereg = addr, displacement = 0;
4906 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4907 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4908 else
4909 abort ();
4911 /* We accept vars reached via the containing function's
4912 incoming arg pointer and via its stack variables pointer. */
4913 if (basereg == fp->internal_arg_pointer)
4914 {
4915 /* If reached via arg pointer, get the arg pointer value
4916 out of that function's stack frame.
4918 There are two cases: If a separate ap is needed, allocate a
4919 slot in the outer function for it and dereference it that way.
4920 This is correct even if the real ap is actually a pseudo.
4921 Otherwise, just adjust the offset from the frame pointer to
4922 compensate. */
4924 #ifdef NEED_SEPARATE_AP
4925 rtx addr;
4927 if (fp->arg_pointer_save_area == 0)
4928 fp->arg_pointer_save_area
4929 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4931 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4932 addr = memory_address (Pmode, addr);
4934 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4935 #else
4936 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4937 base = lookup_static_chain (var);
4938 #endif
4939 }
4941 else if (basereg == virtual_stack_vars_rtx)
4942 {
4943 /* This is the same code as lookup_static_chain, duplicated here to
4944 avoid an extra call to decl_function_context. */
4945 tree link;
4947 for (link = context_display; link; link = TREE_CHAIN (link))
4948 if (TREE_PURPOSE (link) == context)
4949 {
4950 base = RTL_EXPR_RTL (TREE_VALUE (link));
4951 break;
4952 }
4953 }
4955 if (base == 0)
4956 abort ();
4958 /* Use same offset, relative to appropriate static chain or argument
4959 pointer. */
4960 return plus_constant (base, displacement);
4961 }
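/* Worked example of the decoding above: an address of the form

       (plus (reg) (const_int -8))

   yields basereg == (reg) and displacement == -8; the same -8 is then
   reapplied to the appropriate base for the containing function's
   frame.  */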
4963 /* Return the address of the trampoline for entering nested fn FUNCTION.
4964 If necessary, allocate a trampoline (in the stack frame)
4965 and emit rtl to initialize its contents (at entry to this function). */
4967 rtx
4968 trampoline_address (function)
4969 tree function;
4970 {
4971 tree link;
4972 tree rtlexp;
4973 rtx tramp;
4974 struct function *fp;
4975 tree fn_context;
4977 /* Find an existing trampoline and return it. */
4978 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4979 if (TREE_PURPOSE (link) == function)
4980 return
4981 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4983 for (fp = outer_function_chain; fp; fp = fp->next)
4984 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4985 if (TREE_PURPOSE (link) == function)
4986 {
4987 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4988 function);
4989 return round_trampoline_addr (tramp);
4990 }
4992 /* None exists; we must make one. */
4994 /* Find the `struct function' for the function containing FUNCTION. */
4995 fp = 0;
4996 fn_context = decl_function_context (function);
4997 if (fn_context != current_function_decl
4998 && fn_context != inline_function_decl)
4999 for (fp = outer_function_chain; fp; fp = fp->next)
5000 if (fp->decl == fn_context)
5001 break;
5003 /* Allocate run-time space for this trampoline
5004 (usually in the defining function's stack frame). */
5005 #ifdef ALLOCATE_TRAMPOLINE
5006 tramp = ALLOCATE_TRAMPOLINE (fp);
5007 #else
5008 /* If rounding needed, allocate extra space
5009 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5010 #ifdef TRAMPOLINE_ALIGNMENT
5011 #define TRAMPOLINE_REAL_SIZE \
5012 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5013 #else
5014 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5015 #endif
5016 if (fp != 0)
5017 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5018 else
5019 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5020 #endif
5022 /* Record the trampoline for reuse and note it for later initialization
5023 by expand_function_end. */
5024 if (fp != 0)
5025 {
5026 push_obstacks (fp->function_maybepermanent_obstack,
5027 fp->function_maybepermanent_obstack);
5028 rtlexp = make_node (RTL_EXPR);
5029 RTL_EXPR_RTL (rtlexp) = tramp;
5030 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5031 pop_obstacks ();
5032 }
5033 else
5034 {
5035 /* Make the RTL_EXPR node temporary, not momentary, so that the
5036 trampoline_list doesn't become garbage. */
5037 int momentary = suspend_momentary ();
5038 rtlexp = make_node (RTL_EXPR);
5039 resume_momentary (momentary);
5041 RTL_EXPR_RTL (rtlexp) = tramp;
5042 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5043 }
5045 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5046 return round_trampoline_addr (tramp);
5047 }
5049 /* Given a trampoline address,
5050 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5052 static rtx
5053 round_trampoline_addr (tramp)
5054 rtx tramp;
5055 {
5056 #ifdef TRAMPOLINE_ALIGNMENT
5057 /* Round address up to desired boundary. */
5058 rtx temp = gen_reg_rtx (Pmode);
5059 temp = expand_binop (Pmode, add_optab, tramp,
5060 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5061 temp, 0, OPTAB_LIB_WIDEN);
5062 tramp = expand_binop (Pmode, and_optab, temp,
5063 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5064 temp, 0, OPTAB_LIB_WIDEN);
5065 #endif
5066 return tramp;
5067 }
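/* Worked example (assuming TRAMPOLINE_ALIGNMENT == 64, i.e. 8 bytes):
   a raw trampoline address of 0x1003 rounds to
   (0x1003 + 7) & -8 == 0x1008; this is why TRAMPOLINE_REAL_SIZE above
   reserves 7 extra bytes.  */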
5069 /* The functions identify_blocks and reorder_blocks provide a way to
5070 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5071 duplicate portions of the RTL code. Call identify_blocks before
5072 changing the RTL, and call reorder_blocks after. */
5074 /* Put all this function's BLOCK nodes including those that are chained
5075 onto the first block into a vector, and return it.
5076 Also store in each NOTE for the beginning or end of a block
5077 the index of that block in the vector.
5078 The arguments are BLOCK, the chain of top-level blocks of the function,
5079 and INSNS, the insn chain of the function. */
5081 tree *
5082 identify_blocks (block, insns)
5083 tree block;
5084 rtx insns;
5085 {
5086 int n_blocks;
5087 tree *block_vector;
5088 int *block_stack;
5089 int depth = 0;
5090 int next_block_number = 1;
5091 int current_block_number = 1;
5092 rtx insn;
5094 if (block == 0)
5095 return 0;
5097 n_blocks = all_blocks (block, 0);
5098 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5099 block_stack = (int *) alloca (n_blocks * sizeof (int));
5101 all_blocks (block, block_vector);
5103 for (insn = insns; insn; insn = NEXT_INSN (insn))
5104 if (GET_CODE (insn) == NOTE)
5105 {
5106 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5107 {
5108 block_stack[depth++] = current_block_number;
5109 current_block_number = next_block_number;
5110 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5111 }
5112 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5113 {
5114 current_block_number = block_stack[--depth];
5115 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5116 }
5117 }
5119 if (n_blocks != next_block_number)
5120 abort ();
5122 return block_vector;
5123 }
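/* Illustrative sketch: for a body whose blocks nest as
   { A { B } { C } }, the BLOCK_BEG notes are numbered in the order met
   (A = 1, B = 2, C = 3), while each BLOCK_END note records the number
   of the block being re-entered, so reorder_blocks can rebuild the
   tree from a reshuffled insn chain.  */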
5125 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5126 and a revised instruction chain, rebuild the tree structure
5127 of BLOCK nodes to correspond to the new order of RTL.
5128 The new block tree is inserted below TOP_BLOCK.
5129 Returns the current top-level block. */
5131 tree
5132 reorder_blocks (block_vector, block, insns)
5133 tree *block_vector;
5134 tree block;
5135 rtx insns;
5136 {
5137 tree current_block = block;
5138 rtx insn;
5140 if (block_vector == 0)
5141 return block;
5143 /* Prune the old trees away, so that they don't get in the way. */
5144 BLOCK_SUBBLOCKS (current_block) = 0;
5145 BLOCK_CHAIN (current_block) = 0;
5147 for (insn = insns; insn; insn = NEXT_INSN (insn))
5148 if (GET_CODE (insn) == NOTE)
5149 {
5150 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5151 {
5152 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5153 /* If we have seen this block before, copy it. */
5154 if (TREE_ASM_WRITTEN (block))
5155 block = copy_node (block);
5156 BLOCK_SUBBLOCKS (block) = 0;
5157 TREE_ASM_WRITTEN (block) = 1;
5158 BLOCK_SUPERCONTEXT (block) = current_block;
5159 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5160 BLOCK_SUBBLOCKS (current_block) = block;
5161 current_block = block;
5162 NOTE_SOURCE_FILE (insn) = 0;
5163 }
5164 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5165 {
5166 BLOCK_SUBBLOCKS (current_block)
5167 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5168 current_block = BLOCK_SUPERCONTEXT (current_block);
5169 NOTE_SOURCE_FILE (insn) = 0;
5170 }
5171 }
5173 BLOCK_SUBBLOCKS (current_block)
5174 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5175 return current_block;
5176 }
5178 /* Reverse the order of elements in the chain T of blocks,
5179 and return the new head of the chain (old last element). */
5181 static tree
5182 blocks_nreverse (t)
5183 tree t;
5184 {
5185 register tree prev = 0, decl, next;
5186 for (decl = t; decl; decl = next)
5187 {
5188 next = BLOCK_CHAIN (decl);
5189 BLOCK_CHAIN (decl) = prev;
5190 prev = decl;
5191 }
5192 return prev;
5193 }
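/* Illustrative sketch: a chain A -> B -> C (linked through
   BLOCK_CHAIN) comes back as C -> B -> A; this is the classic in-place
   pointer reversal of a singly linked list.  */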
5195 /* Count the subblocks of the list starting with BLOCK, and list them
5196 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5197 blocks. */
5199 static int
5200 all_blocks (block, vector)
5201 tree block;
5202 tree *vector;
5203 {
5204 int n_blocks = 0;
5206 while (block)
5207 {
5208 TREE_ASM_WRITTEN (block) = 0;
5210 /* Record this block. */
5211 if (vector)
5212 vector[n_blocks] = block;
5214 ++n_blocks;
5216 /* Record the subblocks, and their subblocks... */
5217 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5218 vector ? vector + n_blocks : 0);
5219 block = BLOCK_CHAIN (block);
5220 }
5222 return n_blocks;
5223 }
5225 /* Build bytecode call descriptor for function SUBR. */
5227 rtx
5228 bc_build_calldesc (subr)
5229 tree subr;
5230 {
5231 tree calldesc = 0, arg;
5232 int nargs = 0;
5234 /* Build the argument description vector in reverse order. */
5235 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
5236 nargs = 0;
5238 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
5239 {
5240 ++nargs;
5242 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
5243 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
5244 }
5246 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
5248 /* Prepend the function's return type. */
5249 calldesc = tree_cons ((tree) 0,
5250 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
5251 calldesc);
5253 calldesc = tree_cons ((tree) 0,
5254 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
5255 calldesc);
5257 /* Prepend the arg count. */
5258 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
5260 /* Output the call description vector and get its address. */
5261 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
5262 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
5263 build_index_type (build_int_2 (nargs * 2, 0)));
5265 return output_constant_def (calldesc);
5266 }
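/* Layout sketch of the vector built above (reconstructed from the
   construction order; assembled back-to-front with tree_cons):

       { nargs,
         type code (return), size (return),
         type code (arg 1),  size (arg 1),
         type code (arg 2),  size (arg 2), ... }

   It is then emitted as an integer array constant.  */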
5269 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5270 and initialize static variables for generating RTL for the statements
5271 of the function. */
5273 void
5274 init_function_start (subr, filename, line)
5275 tree subr;
5276 char *filename;
5277 int line;
5278 {
5279 if (output_bytecode)
5280 {
5281 this_function_decl = subr;
5282 this_function_calldesc = bc_build_calldesc (subr);
5283 local_vars_size = 0;
5284 stack_depth = 0;
5285 max_stack_depth = 0;
5286 stmt_expr_depth = 0;
5287 return;
5288 }
5290 init_stmt_for_function ();
5292 cse_not_expected = ! optimize;
5294 /* Caller save not needed yet. */
5295 caller_save_needed = 0;
5297 /* No stack slots have been made yet. */
5298 stack_slot_list = 0;
5300 /* There is no stack slot for handling nonlocal gotos. */
5301 nonlocal_goto_handler_slot = 0;
5302 nonlocal_goto_stack_level = 0;
5304 /* No labels have been declared for nonlocal use. */
5305 nonlocal_labels = 0;
5307 /* No function calls so far in this function. */
5308 function_call_count = 0;
5310 /* No parm regs have been allocated.
5311 (This is important for output_inline_function.) */
5312 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5314 /* Initialize the RTL mechanism. */
5315 init_emit ();
5317 /* Initialize the queue of pending postincrement and postdecrements,
5318 and some other info in expr.c. */
5319 init_expr ();
5321 /* We haven't done register allocation yet. */
5322 reg_renumber = 0;
5324 init_const_rtx_hash_table ();
5326 current_function_name = (*decl_printable_name) (subr, 2);
5328 /* Nonzero if this is a nested function that uses a static chain. */
5330 current_function_needs_context
5331 = (decl_function_context (current_function_decl) != 0
5332 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5334 /* Set if a call to setjmp is seen. */
5335 current_function_calls_setjmp = 0;
5337 /* Set if a call to longjmp is seen. */
5338 current_function_calls_longjmp = 0;
5340 current_function_calls_alloca = 0;
5341 current_function_has_nonlocal_label = 0;
5342 current_function_has_nonlocal_goto = 0;
5343 current_function_contains_functions = 0;
5344 current_function_is_thunk = 0;
5346 current_function_returns_pcc_struct = 0;
5347 current_function_returns_struct = 0;
5348 current_function_epilogue_delay_list = 0;
5349 current_function_uses_const_pool = 0;
5350 current_function_uses_pic_offset_table = 0;
5352 /* We have not yet needed to make a label to jump to for tail-recursion. */
5353 tail_recursion_label = 0;
5355 /* We haven't had a need to make a save area for ap yet. */
5357 arg_pointer_save_area = 0;
5359 /* No stack slots allocated yet. */
5360 frame_offset = 0;
5362 /* No SAVE_EXPRs in this function yet. */
5363 save_expr_regs = 0;
5365 /* No RTL_EXPRs in this function yet. */
5366 rtl_expr_chain = 0;
5368 /* Set up to allocate temporaries. */
5369 init_temp_slots ();
5371 /* Within function body, compute a type's size as soon it is laid out. */
5372 immediate_size_expand++;
5374 /* We haven't made any trampolines for this function yet. */
5375 trampoline_list = 0;
5377 init_pending_stack_adjust ();
5378 inhibit_defer_pop = 0;
5380 current_function_outgoing_args_size = 0;
5382 /* Prevent ever trying to delete the first instruction of a function.
5383 Also tell final how to output a linenum before the function prologue. */
5384 emit_line_note (filename, line);
5386 /* Make sure first insn is a note even if we don't want linenums.
5387 This makes sure the first insn will never be deleted.
5388 Also, final expects a note to appear there. */
5389 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5391 /* Set flags used by final.c. */
5392 if (aggregate_value_p (DECL_RESULT (subr)))
5394 #ifdef PCC_STATIC_STRUCT_RETURN
5395 current_function_returns_pcc_struct = 1;
5396 #endif
5397 current_function_returns_struct = 1;
5400 /* Warn if this value is an aggregate type,
5401 regardless of which calling convention we are using for it. */
5402 if (warn_aggregate_return
5403 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5404 warning ("function returns an aggregate");
5406 current_function_returns_pointer
5407 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5409 /* Indicate that we need to distinguish between the return value of the
5410 present function and the return value of a function being called. */
5411 rtx_equal_function_value_matters = 1;
5413 /* Indicate that we have not instantiated virtual registers yet. */
5414 virtuals_instantiated = 0;
5416 /* Indicate we have no need of a frame pointer yet. */
5417 frame_pointer_needed = 0;
5419 /* By default assume not varargs or stdarg. */
5420 current_function_varargs = 0;
5421 current_function_stdarg = 0;
/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}
/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
  if (!output_bytecode)
    {
      /* The zero below avoids a possible parse error.  */
      0;
#if !defined (HAS_INIT_SECTION)
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
			 VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
    }
}
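
/* Background note, not from the original source: on targets without an
   init section, the call emitted above makes `main' begin by calling
   `__main' in libgcc, which runs the global constructors.  When the
   target has an init section (and does not define INVOKE__main), that
   work happens at program startup instead, so the call is omitted.  */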
extern struct obstack permanent_obstack;

/* Expand start of bytecode function. See comment at
   expand_function_start below for details.  */

void
bc_expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  char label[20], *name;
  static int nlab;
  tree thisarg;
  int argsz;

  if (TREE_PUBLIC (subr))
    bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));

#ifdef DEBUG_PRINT_CODE
  fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
#endif

  for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
    {
      if (DECL_RTL (thisarg))
	abort ();		/* Should be NULL here I think.  */
      else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
	{
	  DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
	  argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
	}
      else
	{
	  /* Variable-sized objects are pointers to their storage.  */
	  DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
	  argsz += POINTER_SIZE;
	}
    }
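
  /* Illustrative only: for a hypothetical `f (int a, char *s)', the
     loop above gives `a' offset 0 and `s' the offset DECL_SIZE of `a'
     further along in the argument block; a variable-sized parameter
     instead contributes POINTER_SIZE, since only a pointer to its
     storage is passed.  Note that DECL_SIZE and POINTER_SIZE are both
     measured in bits here.  */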

  bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));

  ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);

  ++nlab;
  name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
  this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
  this_function_bytecode
    = bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
}
/* Expand end of bytecode function. For details, see the comment of
   expand_function_end (), below.  */

void
bc_expand_function_end ()
{
  char *ptrconsts;

  expand_null_return ();

  /* Emit any fixup code. This must be done before the call to
     BC_END_FUNCTION (), since that will cause the bytecode
     segment to be finished off and closed.  */

  expand_fixups (NULL_RTX);

  ptrconsts = bc_end_function ();

  bc_align_const (2 /* INT_ALIGN */);

  /* If this changes also make sure to change bc-interp.h!  */

  bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
  bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
  bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
  bc_emit_const_labelref (this_function_bytecode, 0);
  bc_emit_const_labelref (ptrconsts, 0);
  bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
}
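
/* A sketch of the record emitted above, not from the original source:
   the callinfo label points at

       max_stack_depth	(int)
       local_vars_size	(int)
       labelref		bytecode trampoline
       labelref		pointer constants
       labelref		call description vector

   which is the per-function metadata the bytecode interpreter reads;
   bc-interp.h must agree with this layout, as the comment above warns.  */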
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr;

  if (output_bytecode)
    {
      bc_expand_function_start (subr, parms_have_cleanups);
      return;
    }

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
	  || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups. This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
	    = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups)
    {
      /* If function will end with cleanup code for parms,
	 compute the return values into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* Copy the static chain now if it wasn't a register. The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation, copy the static chain
	 pointer into a pseudo. If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG. If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn. The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
					   memory_address (Pmode, last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
				      save_expr_regs);
	}
    }
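
  /* An illustrative GNU C case, not from the original source:

	 int outer (int x)
	 {
	   int middle (void)
	   {
	     int inner (void) { return x; }
	     return inner ();
	   }
	   return middle ();
	 }

     When `inner' is expanded, the loop above follows the static chain
     one frame at a time (middle, then outer) and records an RTL_EXPR
     for each level in context_display, so that references to `x' can
     be addressed through the right enclosing frame.  */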

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one. Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  if (output_bytecode)
    {
      bc_expand_function_end ();
      return;
    }

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx blktramp;
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
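
  /* Background sketch, not from the original source: a trampoline is a
     small block of code, built in the containing function's frame,
     whose address can stand in for a nested function.  Roughly:

	 copy TRAMPOLINE_TEMPLATE into the frame slot   (emit_block_move)
	 patch in the nested function's entry point
	 and its static chain value	      (INITIALIZE_TRAMPOLINE)

     Calling the trampoline then loads the static chain register and
     jumps to the real function, so a plain function pointer suffices
     even for nested functions.  */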

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice. This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above. This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected. This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
	emit_label (label);
      }
  }

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a temporary
	 pseudo.  */
      current_function_return_rtx = real_decl_result;
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
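
/* Illustrative only: for a three-insn SEQUENCE whose insns have UIDs
   14, 15 and 16, record_insns returns the zero-terminated vector

       { 14, 15, 16, 0 }

   The trailing zero is the sentinel that contains () and the
   repositioning loops below rely on; insn UIDs start at 1, so zero
   never collides with a real UID.  */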
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}
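
/* Usage sketch, not from the original source: after delayed-branch
   scheduling a prologue insn may be buried inside a SEQUENCE, so a
   caller counts down with something like

       for (len = 0; prologue[len]; len++)
	 ;
       for (insn = f; len && insn; insn = NEXT_INSN (insn))
	 len -= contains (insn, prologue);

   reaching len == 0 exactly at the last prologue insn, which is how
   reposition_prologue_and_epilogue_notes below uses it.  */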
/* Generate the prologue and epilogue RTL if the machine supports it. Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins. Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq, insn;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
	 prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block. Ignore them
	 if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
	  && GET_CODE (basic_block_head[0]) != CODE_LABEL)
	basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
	{
	  rtx tail, seq, tem;
	  rtx first_use = 0;
	  rtx last_use = 0;

	  /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	     epilogue insns, the USE insns at the end of a function,
	     the jump insn that returns, and then a BARRIER.  */
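
	  /* Sketched insn stream, not from the original source:

		 NOTE_INSN_EPILOGUE_BEG
		 (epilogue insns)
		 (use (reg ...))	;; the moved USE insns
		 (jump_insn (return))
		 barrier

	     The code below rebuilds the end of the stream into this
	     shape.  */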

	  /* Move the USE insns at the end of a function onto a list.  */
	  while (prev
		 && GET_CODE (prev) == INSN
		 && GET_CODE (PATTERN (prev)) == USE)
	    {
	      tem = prev;
	      prev = prev_nonnote_insn (prev);

	      NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
	      PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
	      if (first_use)
		{
		  NEXT_INSN (tem) = first_use;
		  PREV_INSN (first_use) = tem;
		}
	      first_use = tem;
	      if (!last_use)
		last_use = tem;
	    }

	  emit_barrier_after (insn);

	  seq = gen_epilogue ();
	  tail = emit_jump_insn_after (seq, insn);

	  /* Insert the USE insns immediately before the return insn, which
	     must be the first instruction before the final barrier.  */
	  if (first_use)
	    {
	      tem = prev_nonnote_insn (get_last_insn ());
	      NEXT_INSN (PREV_INSN (tem)) = first_use;
	      PREV_INSN (first_use) = PREV_INSN (tem);
	      PREV_INSN (tem) = last_use;
	      NEXT_INSN (last_use) = tem;
	    }

	  emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

	  /* Include the new epilogue insns in the last block. Ignore
	     them if they form a basic block unto themselves.  */
	  if (basic_block_end && n_basic_blocks
	      && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
	    basic_block_end[n_basic_blocks - 1] = tail;

	  /* Retain a map of the epilogue insns.  */
	  epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  return;
	}
    }
#endif
  epilogue = 0;
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; note = NEXT_INSN (note);)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }
		  next = NEXT_INSN (note);
		  prev = PREV_INSN (note);
		  if (prev)
		    NEXT_INSN (prev) = next;
		  if (next)
		    PREV_INSN (next) = prev;
		  add_insn_after (note, insn);
		}
	    }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; note = PREV_INSN (note);)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }
		  next = NEXT_INSN (note);
		  prev = PREV_INSN (note);
		  if (prev)
		    NEXT_INSN (prev) = next;
		  if (next)
		    PREV_INSN (next) = prev;
		  add_insn_after (note, PREV_INSN (insn));
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}