/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that is less than or equal to it.  Avoid using division in case the
   value is negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
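
/* Worked example (editor's note, not in the original source): with
   ALIGN == 8,

     CEIL_ROUND (13, 8)   == (13 + 7) & ~7 == 16
     FLOOR_ROUND (13, 8)  == 13 & ~7       == 8
     FLOOR_ROUND (-13, 8) == -13 & ~7      == -16

   The masking form rounds negative values toward minus infinity, which
   division would not; this matters because frame_offset is negative when
   the frame grows downward.  */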
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;
/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;
/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;
/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;
/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;
/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
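
/* Editor's illustration (an assumption, not in the original source): in a
   statement expression such as

     x = ({ struct S s = make_s (); s.a; });

   the hypothetical call make_s () may hand back its aggregate value in a
   temporary stack slot.  Since that slot may hold the value of the whole
   ({...}) grouping, preserve_temp_slots pretends the slot was allocated at
   the previous nesting level, so the free at the end of the statement does
   not reclaim it.  */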
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};
/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
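
/* Usage sketch (editor's note; a hypothetical caller, not part of this
   file):

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   ALIGN == 0 requests SImode's natural alignment.  The returned MEM is
   addressed off virtual_stack_vars_rtx until virtual registers are
   instantiated, after which such addresses become frame-pointer
   relative.  */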
/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;
  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;
  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
                                                rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
                                         stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
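
/* Usage sketch (editor's note; a hypothetical expansion sequence, not part
   of this file).  Code that needs an addressable temporary for a statement
   might do:

     push_temp_slots ();
     target = assign_temp (TREE_TYPE (exp), 0, 1, 0);
     ... expand EXP, storing its value into TARGET ...
     preserve_temp_slots (target);
     free_temp_slots ();
     pop_temp_slots ();

   With KEEP == 0, free_temp_slots reclaims the slot at the end of the
   statement unless preserve_temp_slots has first moved it up a level.  */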
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X matched
   one of our slots, just mark that one.  Otherwise, we can't easily
   predict which it is, so upgrade all of them.  Kept slots need not be
   touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
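
/* Level bookkeeping sketch (editor's note; illustrative values, not part
   of this file).  Suppose the current level is N:

     push_temp_slots ();                               now at level N+1
     t = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
     preserve_temp_slots (t);                          T moves to level N
     pop_temp_slots ();                                frees other N+1 slots

   The preserved slot T is then reclaimed by whichever free_temp_slots or
   pop_temp_slots later runs at level N.  */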
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;
  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#endif
      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
              /* If the insn that copies the results of a CALL_INSN
                 into a pseudo now references VAR, we have to use an
                 intermediate pseudo since we want the life of the
                 return value register to be only a single insn.

                 If we don't use an intermediate pseudo, such things as
                 address computations to make the address of VAR valid
                 if it is not can be placed between the CALL_INSN and INSN.

                 To make sure this doesn't happen, we record the destination
                 of the CALL_INSN and see if the next insn uses both that
                 and VAR.  */

              if (SMALL_REGISTER_CLASSES)
                {
                  if (call_dest != 0 && GET_CODE (insn) == INSN
                      && reg_mentioned_p (var, PATTERN (insn))
                      && reg_mentioned_p (call_dest, PATTERN (insn)))
                    {
                      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                      emit_insn_before (gen_move_insn (temp, call_dest), insn);

                      PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                    call_dest, temp);
                    }

                  if (GET_CODE (insn) == CALL_INSN
                      && GET_CODE (PATTERN (insn)) == SET)
                    call_dest = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (insn) == CALL_INSN
                           && GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    call_dest = 0;
                }
#endif

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
1691 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1692 See if the rtx expression at *LOC in INSN needs to be changed.
1694 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1695 contain a list of original rtx's and replacements. If we find that we need
1696 to modify this insn by replacing a memory reference with a pseudo or by
1697 making a new MEM to implement a SUBREG, we consult that list to see if
1698 we have already chosen a replacement. If none has already been allocated,
1699 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1700 or the SUBREG, as appropriate, to the pseudo. */
1702 static void
1703 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1704 register rtx var;
1705 enum machine_mode promoted_mode;
1706 register rtx *loc;
1707 rtx insn;
1708 struct fixup_replacement **replacements;
1710 register int i;
1711 register rtx x = *loc;
1712 RTX_CODE code = GET_CODE (x);
1713 register char *fmt;
1714 register rtx tem, tem1;
1715 struct fixup_replacement *replacement;
1717 switch (code)
1719 case MEM:
1720 if (var == x)
1722 /* If we already have a replacement, use it. Otherwise,
1723 try to fix up this address in case it is invalid. */
1725 replacement = find_fixup_replacement (replacements, var);
1726 if (replacement->new)
1728 *loc = replacement->new;
1729 return;
1732 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1734 /* Unless we are forcing memory to register or we changed the mode,
1735 we can leave things the way they are if the insn is valid. */
1737 INSN_CODE (insn) = -1;
1738 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1739 && recog_memoized (insn) >= 0)
1740 return;
1742 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1743 return;
1746 /* If X contains VAR, we need to unshare it here so that we update
1747 each occurrence separately. But all identical MEMs in one insn
1748 must be replaced with the same rtx because of the possibility of
1749 MATCH_DUPs. */
1751 if (reg_mentioned_p (var, x))
1753 replacement = find_fixup_replacement (replacements, x);
1754 if (replacement->new == 0)
1755 replacement->new = copy_most_rtx (x, var);
1757 *loc = x = replacement->new;
1759 break;
1761 case REG:
1762 case CC0:
1763 case PC:
1764 case CONST_INT:
1765 case CONST:
1766 case SYMBOL_REF:
1767 case LABEL_REF:
1768 case CONST_DOUBLE:
1769 return;
1771 case SIGN_EXTRACT:
1772 case ZERO_EXTRACT:
1773 /* Note that in some cases those types of expressions are altered
1774 by optimize_bit_field, and do not survive to get here. */
1775 if (XEXP (x, 0) == var
1776 || (GET_CODE (XEXP (x, 0)) == SUBREG
1777 && SUBREG_REG (XEXP (x, 0)) == var))
1779 /* Get TEM as a valid MEM in the mode presently in the insn.
1781 We don't worry about the possibility of MATCH_DUP here; it
1782 is highly unlikely and would be tricky to handle. */
1784 tem = XEXP (x, 0);
1785 if (GET_CODE (tem) == SUBREG)
1787 if (GET_MODE_BITSIZE (GET_MODE (tem))
1788 > GET_MODE_BITSIZE (GET_MODE (var)))
1790 replacement = find_fixup_replacement (replacements, var);
1791 if (replacement->new == 0)
1792 replacement->new = gen_reg_rtx (GET_MODE (var));
1793 SUBREG_REG (tem) = replacement->new;
1795 else
1796 tem = fixup_memory_subreg (tem, insn, 0);
1798 else
1799 tem = fixup_stack_1 (tem, insn);
1801 /* Unless we want to load from memory, get TEM into the proper mode
1802 for an extract from memory. This can only be done if the
1803 extract is at a constant position and length. */
1805 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1806 && GET_CODE (XEXP (x, 2)) == CONST_INT
1807 && ! mode_dependent_address_p (XEXP (tem, 0))
1808 && ! MEM_VOLATILE_P (tem))
1810 enum machine_mode wanted_mode = VOIDmode;
1811 enum machine_mode is_mode = GET_MODE (tem);
1812 int width = INTVAL (XEXP (x, 1));
1813 int pos = INTVAL (XEXP (x, 2));
1815 #ifdef HAVE_extzv
1816 if (GET_CODE (x) == ZERO_EXTRACT)
1817 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1818 #endif
1819 #ifdef HAVE_extv
1820 if (GET_CODE (x) == SIGN_EXTRACT)
1821 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1822 #endif
1823 /* If we have a narrower mode, we can do something. */
1824 if (wanted_mode != VOIDmode
1825 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1827 int offset = pos / BITS_PER_UNIT;
1828 rtx old_pos = XEXP (x, 2);
1829 rtx newmem;
1831 /* If the bytes and bits are counted differently, we
1832 must adjust the offset. */
1833 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1834 offset = (GET_MODE_SIZE (is_mode)
1835 - GET_MODE_SIZE (wanted_mode) - offset);
1837 pos %= GET_MODE_BITSIZE (wanted_mode);
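/* Illustrative: if the pattern wants a QImode operand and bytes
   and bits are numbered alike, extracting bits 8-15 of (mem:SI a)
   becomes an extraction at bit 0 within (mem:QI (plus a 1)). */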
1839 newmem = gen_rtx (MEM, wanted_mode,
1840 plus_constant (XEXP (tem, 0), offset));
1841 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1842 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1843 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1845 /* Make the change and see if the insn remains valid. */
1846 INSN_CODE (insn) = -1;
1847 XEXP (x, 0) = newmem;
1848 XEXP (x, 2) = GEN_INT (pos);
1850 if (recog_memoized (insn) >= 0)
1851 return;
1853 /* Otherwise, restore old position. XEXP (x, 0) will be
1854 restored later. */
1855 XEXP (x, 2) = old_pos;
1859 /* If we get here, the bitfield extract insn can't accept a memory
1860 reference. Copy the input into a register. */
1862 tem1 = gen_reg_rtx (GET_MODE (tem));
1863 emit_insn_before (gen_move_insn (tem1, tem), insn);
1864 XEXP (x, 0) = tem1;
1865 return;
1867 break;
1869 case SUBREG:
1870 if (SUBREG_REG (x) == var)
1872 /* If this is a special SUBREG made because VAR was promoted
1873 from a wider mode, replace it with VAR and call ourself
1874 recursively, this time saying that the object previously
1875 had its current mode (by virtue of the SUBREG). */
1877 if (SUBREG_PROMOTED_VAR_P (x))
1879 *loc = var;
1880 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1881 return;
1884 /* If this SUBREG makes VAR wider, it has become a paradoxical
1885 SUBREG with VAR in memory, but these aren't allowed at this
1886 stage of the compilation. So load VAR into a pseudo and take
1887 a SUBREG of that pseudo. */
1888 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1890 replacement = find_fixup_replacement (replacements, var);
1891 if (replacement->new == 0)
1892 replacement->new = gen_reg_rtx (GET_MODE (var));
1893 SUBREG_REG (x) = replacement->new;
1894 return;
1897 /* See if we have already found a replacement for this SUBREG.
1898 If so, use it. Otherwise, make a MEM and see if the insn
1899 is recognized. If not, or if we should force MEM into a register,
1900 make a pseudo for this SUBREG. */
1901 replacement = find_fixup_replacement (replacements, x);
1902 if (replacement->new)
1904 *loc = replacement->new;
1905 return;
1908 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1910 INSN_CODE (insn) = -1;
1911 if (! flag_force_mem && recog_memoized (insn) >= 0)
1912 return;
1914 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1915 return;
1917 break;
1919 case SET:
1920 /* First do special simplification of bit-field references. */
1921 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1922 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1923 optimize_bit_field (x, insn, NULL_PTR);
1924 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1925 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1926 optimize_bit_field (x, insn, NULL_PTR);
1928 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1929 into a register and then store it back out. */
1930 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1931 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1932 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1933 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1934 > GET_MODE_SIZE (GET_MODE (var))))
1936 replacement = find_fixup_replacement (replacements, var);
1937 if (replacement->new == 0)
1938 replacement->new = gen_reg_rtx (GET_MODE (var));
1940 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1941 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1944 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1945 insn into a pseudo and store the low part of the pseudo into VAR. */
1946 if (GET_CODE (SET_DEST (x)) == SUBREG
1947 && SUBREG_REG (SET_DEST (x)) == var
1948 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1949 > GET_MODE_SIZE (GET_MODE (var))))
1951 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1952 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1953 tem)),
1954 insn);
1955 break;
1959 rtx dest = SET_DEST (x);
1960 rtx src = SET_SRC (x);
1961 rtx outerdest = dest;
1963 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1964 || GET_CODE (dest) == SIGN_EXTRACT
1965 || GET_CODE (dest) == ZERO_EXTRACT)
1966 dest = XEXP (dest, 0);
1968 if (GET_CODE (src) == SUBREG)
1969 src = XEXP (src, 0);
1971 /* If VAR does not appear at the top level of the SET,
1972 just scan the lower levels of the tree. */
1974 if (src != var && dest != var)
1975 break;
1977 /* We will need to rerecognize this insn. */
1978 INSN_CODE (insn) = -1;
1980 #ifdef HAVE_insv
1981 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1983 /* Since this case will return, ensure we fix up all the
1984 operands here. */
1985 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1986 insn, replacements);
1987 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1988 insn, replacements);
1989 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1990 insn, replacements);
1992 tem = XEXP (outerdest, 0);
1994 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1995 that may appear inside a ZERO_EXTRACT.
1996 This was legitimate when the MEM was a REG. */
1997 if (GET_CODE (tem) == SUBREG
1998 && SUBREG_REG (tem) == var)
1999 tem = fixup_memory_subreg (tem, insn, 0);
2000 else
2001 tem = fixup_stack_1 (tem, insn);
2003 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2004 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2005 && ! mode_dependent_address_p (XEXP (tem, 0))
2006 && ! MEM_VOLATILE_P (tem))
2008 enum machine_mode wanted_mode
2009 = insn_operand_mode[(int) CODE_FOR_insv][0];
2010 enum machine_mode is_mode = GET_MODE (tem);
2011 int width = INTVAL (XEXP (outerdest, 1));
2012 int pos = INTVAL (XEXP (outerdest, 2));
2014 /* If we have a narrower mode, we can do something. */
2015 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2017 int offset = pos / BITS_PER_UNIT;
2018 rtx old_pos = XEXP (outerdest, 2);
2019 rtx newmem;
2021 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2022 offset = (GET_MODE_SIZE (is_mode)
2023 - GET_MODE_SIZE (wanted_mode) - offset);
2025 pos %= GET_MODE_BITSIZE (wanted_mode);
2027 newmem = gen_rtx (MEM, wanted_mode,
2028 plus_constant (XEXP (tem, 0), offset));
2029 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2030 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2031 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2033 /* Make the change and see if the insn remains valid. */
2034 INSN_CODE (insn) = -1;
2035 XEXP (outerdest, 0) = newmem;
2036 XEXP (outerdest, 2) = GEN_INT (pos);
2038 if (recog_memoized (insn) >= 0)
2039 return;
2041 /* Otherwise, restore old position. XEXP (outerdest, 0) will be
2042 restored later. */
2043 XEXP (outerdest, 2) = old_pos;
2047 /* If we get here, the bit-field store doesn't allow memory
2048 or isn't located at a constant position. Load the value into
2049 a register, do the store, and put it back into memory. */
2051 tem1 = gen_reg_rtx (GET_MODE (tem));
2052 emit_insn_before (gen_move_insn (tem1, tem), insn);
2053 emit_insn_after (gen_move_insn (tem, tem1), insn);
2054 XEXP (outerdest, 0) = tem1;
2055 return;
2057 #endif
2059 /* STRICT_LOW_PART is a no-op on memory references
2060 and it can cause combinations to be unrecognizable,
2061 so eliminate it. */
2063 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2064 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2066 /* A valid insn to copy VAR into or out of a register
2067 must be left alone, to avoid an infinite loop here.
2068 If the reference to VAR is by a subreg, fix that up,
2069 since SUBREG is not valid for a memref.
2070 Also fix up the address of the stack slot.
2072 Note that we must not try to recognize the insn until
2073 after we know that we have valid addresses and no
2074 (subreg (mem ...) ...) constructs, since these interfere
2075 with determining the validity of the insn. */
2077 if ((SET_SRC (x) == var
2078 || (GET_CODE (SET_SRC (x)) == SUBREG
2079 && SUBREG_REG (SET_SRC (x)) == var))
2080 && (GET_CODE (SET_DEST (x)) == REG
2081 || (GET_CODE (SET_DEST (x)) == SUBREG
2082 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2083 && GET_MODE (var) == promoted_mode
2084 && x == single_set (insn))
2086 rtx pat;
2088 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2089 if (replacement->new)
2090 SET_SRC (x) = replacement->new;
2091 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2092 SET_SRC (x) = replacement->new
2093 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2094 else
2095 SET_SRC (x) = replacement->new
2096 = fixup_stack_1 (SET_SRC (x), insn);
2098 if (recog_memoized (insn) >= 0)
2099 return;
2101 /* INSN is not valid, but we know that we want to
2102 copy SET_SRC (x) to SET_DEST (x) in some way. So
2103 we generate the move and see whether it requires more
2104 than one insn. If it does, we emit those insns and
2105 delete INSN. Otherwise, we can just replace the pattern
2106 of INSN; we have already verified above that INSN has
2107 no other function than to do X. */
2109 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2110 if (GET_CODE (pat) == SEQUENCE)
2112 emit_insn_after (pat, insn);
2113 PUT_CODE (insn, NOTE);
2114 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2115 NOTE_SOURCE_FILE (insn) = 0;
2117 else
2118 PATTERN (insn) = pat;
2120 return;
2123 if ((SET_DEST (x) == var
2124 || (GET_CODE (SET_DEST (x)) == SUBREG
2125 && SUBREG_REG (SET_DEST (x)) == var))
2126 && (GET_CODE (SET_SRC (x)) == REG
2127 || (GET_CODE (SET_SRC (x)) == SUBREG
2128 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2129 && GET_MODE (var) == promoted_mode
2130 && x == single_set (insn))
2132 rtx pat;
2134 if (GET_CODE (SET_DEST (x)) == SUBREG)
2135 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2136 else
2137 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2139 if (recog_memoized (insn) >= 0)
2140 return;
2142 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2143 if (GET_CODE (pat) == SEQUENCE)
2145 emit_insn_after (pat, insn);
2146 PUT_CODE (insn, NOTE);
2147 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2148 NOTE_SOURCE_FILE (insn) = 0;
2150 else
2151 PATTERN (insn) = pat;
2153 return;
2156 /* Otherwise, storing into VAR must be handled specially
2157 by storing into a temporary and copying that into VAR
2158 with a new insn after this one. Note that this case
2159 will be used when storing into a promoted scalar since
2160 the insn will now have different modes on the input
2161 and output and hence will be invalid (except for the case
2162 of setting it to a constant, which does not need any
2163 change if it is valid). We generate extra code in that case,
2164 but combine.c will eliminate it. */
2166 if (dest == var)
2168 rtx temp;
2169 rtx fixeddest = SET_DEST (x);
2171 /* A STRICT_LOW_PART around a MEM can be discarded. */
2172 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2173 fixeddest = XEXP (fixeddest, 0);
2174 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2175 if (GET_CODE (fixeddest) == SUBREG)
2177 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2178 promoted_mode = GET_MODE (fixeddest);
2180 else
2181 fixeddest = fixup_stack_1 (fixeddest, insn);
2183 temp = gen_reg_rtx (promoted_mode);
2185 emit_insn_after (gen_move_insn (fixeddest,
2186 gen_lowpart (GET_MODE (fixeddest),
2187 temp)),
2188 insn);
2190 SET_DEST (x) = temp;
2195 /* Nothing special about this RTX; fix its operands. */
2197 fmt = GET_RTX_FORMAT (code);
2198 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2200 if (fmt[i] == 'e')
2201 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2202 if (fmt[i] == 'E')
2204 register int j;
2205 for (j = 0; j < XVECLEN (x, i); j++)
2206 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2207 insn, replacements);
2212 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2213 return an rtx (MEM:m1 newaddr) which is equivalent.
2214 If any insns must be emitted to compute NEWADDR, put them before INSN.
2216 UNCRITICAL nonzero means accept paradoxical subregs.
2217 This is used for subregs found inside REG_NOTES. */
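/* Illustrative example, assuming 32-bit words: (subreg:QI (mem:SI addr) 0)
   becomes (mem:QI addr) on a little-endian target, but
   (mem:QI (plus addr 3)) on a big-endian one, since there the least
   significant byte lies at the highest address within the word. */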
2219 static rtx
2220 fixup_memory_subreg (x, insn, uncritical)
2221 rtx x;
2222 rtx insn;
2223 int uncritical;
2225 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2226 rtx addr = XEXP (SUBREG_REG (x), 0);
2227 enum machine_mode mode = GET_MODE (x);
2228 rtx saved, result;
2230 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2231 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2232 && ! uncritical)
2233 abort ();
2235 if (BYTES_BIG_ENDIAN)
2236 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2237 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2238 addr = plus_constant (addr, offset);
2239 if (!flag_force_addr && memory_address_p (mode, addr))
2240 /* Shortcut if no insns need be emitted. */
2241 return change_address (SUBREG_REG (x), mode, addr);
2242 start_sequence ();
2243 result = change_address (SUBREG_REG (x), mode, addr);
2244 emit_insn_before (gen_sequence (), insn);
2245 end_sequence ();
2246 return result;
2249 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2250 Replace subexpressions of X in place.
2251 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2252 Otherwise return X, with its contents possibly altered.
2254 If any insns must be emitted to compute NEWADDR, put them before INSN.
2256 UNCRITICAL is as in fixup_memory_subreg. */
2258 static rtx
2259 walk_fixup_memory_subreg (x, insn, uncritical)
2260 register rtx x;
2261 rtx insn;
2262 int uncritical;
2264 register enum rtx_code code;
2265 register char *fmt;
2266 register int i;
2268 if (x == 0)
2269 return 0;
2271 code = GET_CODE (x);
2273 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2274 return fixup_memory_subreg (x, insn, uncritical);
2276 /* Nothing special about this RTX; fix its operands. */
2278 fmt = GET_RTX_FORMAT (code);
2279 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2281 if (fmt[i] == 'e')
2282 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2283 if (fmt[i] == 'E')
2285 register int j;
2286 for (j = 0; j < XVECLEN (x, i); j++)
2287 XVECEXP (x, i, j)
2288 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2291 return x;
2294 /* For each memory ref within X, if it refers to a stack slot
2295 with an out of range displacement, put the address in a temp register
2296 (emitting new insns before INSN to load these registers)
2297 and alter the memory ref to use that register.
2298 Replace each such MEM rtx with a copy, to avoid clobberage. */
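/* A sketch of the transformation (illustrative): given
   (mem:SI (plus (reg virtual-stack-vars) (const_int 40000))) where the
   displacement is too large to be a valid address on the target, insns
   computing the sum into a fresh pseudo TEMP are emitted before INSN
   and the reference becomes (mem:SI TEMP). */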
2300 static rtx
2301 fixup_stack_1 (x, insn)
2302 rtx x;
2303 rtx insn;
2305 register int i;
2306 register RTX_CODE code = GET_CODE (x);
2307 register char *fmt;
2309 if (code == MEM)
2311 register rtx ad = XEXP (x, 0);
2312 /* If we have the address of a stack slot but it's not valid
2313 (displacement is too large), compute the sum in a register. */
2314 if (GET_CODE (ad) == PLUS
2315 && GET_CODE (XEXP (ad, 0)) == REG
2316 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2317 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2318 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2319 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2321 rtx temp, seq;
2322 if (memory_address_p (GET_MODE (x), ad))
2323 return x;
2325 start_sequence ();
2326 temp = copy_to_reg (ad);
2327 seq = gen_sequence ();
2328 end_sequence ();
2329 emit_insn_before (seq, insn);
2330 return change_address (x, VOIDmode, temp);
2332 return x;
2335 fmt = GET_RTX_FORMAT (code);
2336 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2338 if (fmt[i] == 'e')
2339 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2340 if (fmt[i] == 'E')
2342 register int j;
2343 for (j = 0; j < XVECLEN (x, i); j++)
2344 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2347 return x;
2350 /* Optimization: a bit-field instruction whose field
2351 happens to be a byte or halfword in memory
2352 can be changed to a move instruction.
2354 We call here when INSN is an insn to examine or store into a bit-field.
2355 BODY is the SET-rtx to be altered.
2357 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2358 (Currently this is called only from function.c, and EQUIV_MEM
2359 is always 0.) */
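/* Illustrative example: (set (zero_extract:SI (mem:SI p) (const_int 8)
   (const_int 0)) (reg:SI r)) stores an aligned byte, so it can be
   rewritten as a simple move of the low part of R into a QImode MEM
   addressing that byte; the exact address depends on endianness. */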
2361 static void
2362 optimize_bit_field (body, insn, equiv_mem)
2363 rtx body;
2364 rtx insn;
2365 rtx *equiv_mem;
2367 register rtx bitfield;
2368 int destflag;
2369 rtx seq = 0;
2370 enum machine_mode mode;
2372 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2373 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2374 bitfield = SET_DEST (body), destflag = 1;
2375 else
2376 bitfield = SET_SRC (body), destflag = 0;
2378 /* First check that the field being stored has constant size and position
2379 and is in fact a byte or halfword suitably aligned. */
2381 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2382 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2383 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2384 != BLKmode)
2385 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2387 register rtx memref = 0;
2389 /* Now check that the containing word is memory, not a register,
2390 and that it is safe to change the machine mode. */
2392 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2393 memref = XEXP (bitfield, 0);
2394 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2395 && equiv_mem != 0)
2396 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2397 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2398 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2399 memref = SUBREG_REG (XEXP (bitfield, 0));
2400 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2401 && equiv_mem != 0
2402 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2403 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2405 if (memref
2406 && ! mode_dependent_address_p (XEXP (memref, 0))
2407 && ! MEM_VOLATILE_P (memref))
2409 /* Now adjust the address, first for any subreg'ing
2410 that we are now getting rid of,
2411 and then for which byte of the word is wanted. */
2413 register int offset = INTVAL (XEXP (bitfield, 2));
2414 rtx insns;
2416 /* Adjust OFFSET to count bits from low-address byte. */
2417 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2418 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2419 - offset - INTVAL (XEXP (bitfield, 1)));
2421 /* Adjust OFFSET to count bytes from low-address byte. */
2422 offset /= BITS_PER_UNIT;
2423 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2425 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2426 if (BYTES_BIG_ENDIAN)
2427 offset -= (MIN (UNITS_PER_WORD,
2428 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2429 - MIN (UNITS_PER_WORD,
2430 GET_MODE_SIZE (GET_MODE (memref))));
2433 start_sequence ();
2434 memref = change_address (memref, mode,
2435 plus_constant (XEXP (memref, 0), offset));
2436 insns = get_insns ();
2437 end_sequence ();
2438 emit_insns_before (insns, insn);
2440 /* Store this memory reference where
2441 we found the bit field reference. */
2443 if (destflag)
2445 validate_change (insn, &SET_DEST (body), memref, 1);
2446 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2448 rtx src = SET_SRC (body);
2449 while (GET_CODE (src) == SUBREG
2450 && SUBREG_WORD (src) == 0)
2451 src = SUBREG_REG (src);
2452 if (GET_MODE (src) != GET_MODE (memref))
2453 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2454 validate_change (insn, &SET_SRC (body), src, 1);
2456 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2457 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2458 /* This shouldn't happen because anything that didn't have
2459 one of these modes should have been converted explicitly
2460 and then referenced through a subreg.
2461 This is so because the original bit-field was
2462 handled by agg_mode and so its tree structure had
2463 the same mode that memref now has. */
2464 abort ();
2466 else
2468 rtx dest = SET_DEST (body);
2470 while (GET_CODE (dest) == SUBREG
2471 && SUBREG_WORD (dest) == 0
2472 && (GET_MODE_CLASS (GET_MODE (dest))
2473 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2474 dest = SUBREG_REG (dest);
2476 validate_change (insn, &SET_DEST (body), dest, 1);
2478 if (GET_MODE (dest) == GET_MODE (memref))
2479 validate_change (insn, &SET_SRC (body), memref, 1);
2480 else
2482 /* Convert the mem ref to the destination mode. */
2483 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2485 start_sequence ();
2486 convert_move (newreg, memref,
2487 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2488 seq = get_insns ();
2489 end_sequence ();
2491 validate_change (insn, &SET_SRC (body), newreg, 1);
2495 /* See if we can convert this extraction or insertion into
2496 a simple move insn. We might not be able to do so if this
2497 was, for example, part of a PARALLEL.
2499 If we succeed, write out any needed conversions. If we fail,
2500 it is hard to guess why we failed, so don't do anything
2501 special; just let the optimization be suppressed. */
2503 if (apply_change_group () && seq)
2504 emit_insns_before (seq, insn);
2509 /* These routines are responsible for converting virtual register references
2510 to the actual hard register references once RTL generation is complete.
2512 The following four variables are used for communication between the
2513 routines. They contain the offsets of the virtual registers from their
2514 respective hard registers. */
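/* The correspondence, made concrete in the REG and PLUS cases of
   instantiate_virtual_regs_1 below, is:
   virtual_incoming_args_rtx -> arg_pointer_rtx + in_arg_offset
   virtual_stack_vars_rtx -> frame_pointer_rtx + var_offset
   virtual_stack_dynamic_rtx -> stack_pointer_rtx + dynamic_offset
   virtual_outgoing_args_rtx -> stack_pointer_rtx + out_arg_offset */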
2516 static int in_arg_offset;
2517 static int var_offset;
2518 static int dynamic_offset;
2519 static int out_arg_offset;
2521 /* In most machines, the stack pointer register is equivalent to the bottom
2522 of the stack. */
2524 #ifndef STACK_POINTER_OFFSET
2525 #define STACK_POINTER_OFFSET 0
2526 #endif
2528 /* If not defined, pick an appropriate default for the offset of dynamically
2529 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2530 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2532 #ifndef STACK_DYNAMIC_OFFSET
2534 #ifdef ACCUMULATE_OUTGOING_ARGS
2535 /* The bottom of the stack points to the actual arguments. If
2536 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2537 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2538 stack space for register parameters is not pushed by the caller, but
2539 rather part of the fixed stack areas and hence not included in
2540 `current_function_outgoing_args_size'. Nevertheless, we must allow
2541 for it when allocating stack dynamic objects. */
2543 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2544 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2545 (current_function_outgoing_args_size \
2546 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2548 #else
2549 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2550 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2551 #endif
2553 #else
2554 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2555 #endif
2556 #endif
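/* For example (illustrative): with ACCUMULATE_OUTGOING_ARGS, no
   REG_PARM_STACK_SPACE, and 16 bytes of outgoing argument space,
   dynamic allocations start 16 + STACK_POINTER_OFFSET bytes above the
   stack pointer. */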
2558 /* Pass through the INSNS of function FNDECL and convert virtual register
2559 references to hard register references. */
2561 void
2562 instantiate_virtual_regs (fndecl, insns)
2563 tree fndecl;
2564 rtx insns;
2566 rtx insn;
2568 /* Compute the offsets to use for this function. */
2569 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2570 var_offset = STARTING_FRAME_OFFSET;
2571 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2572 out_arg_offset = STACK_POINTER_OFFSET;
2574 /* Scan all variables and parameters of this function. For each that is
2575 in memory, instantiate all virtual registers if the result is a valid
2576 address. If not, we do it later. That will handle most uses of virtual
2577 regs on many machines. */
2578 instantiate_decls (fndecl, 1);
2580 /* Initialize recognition, indicating that volatile is OK. */
2581 init_recog ();
2583 /* Scan through all the insns, instantiating every virtual register still
2584 present. */
2585 for (insn = insns; insn; insn = NEXT_INSN (insn))
2586 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2587 || GET_CODE (insn) == CALL_INSN)
2589 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2590 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2593 /* Now instantiate the remaining register equivalences for debugging info.
2594 These will not be valid addresses. */
2595 instantiate_decls (fndecl, 0);
2597 /* Indicate that, from now on, assign_stack_local should use
2598 frame_pointer_rtx. */
2599 virtuals_instantiated = 1;
2602 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2603 all virtual registers in their DECL_RTL's.
2605 If VALID_ONLY, do this only if the resulting address is still valid.
2606 Otherwise, always do it. */
2608 static void
2609 instantiate_decls (fndecl, valid_only)
2610 tree fndecl;
2611 int valid_only;
2613 tree decl;
2615 if (DECL_SAVED_INSNS (fndecl))
2616 /* When compiling an inline function, the obstack used for
2617 rtl allocation is the maybepermanent_obstack. Calling
2618 `resume_temporary_allocation' switches us back to that
2619 obstack while we process this function's parameters. */
2620 resume_temporary_allocation ();
2622 /* Process all parameters of the function. */
2623 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2625 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2626 valid_only);
2627 instantiate_decl (DECL_INCOMING_RTL (decl),
2628 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2631 /* Now process all variables defined in the function or its subblocks. */
2632 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2634 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2636 /* Save all rtl allocated for this function by raising the
2637 high-water mark on the maybepermanent_obstack. */
2638 preserve_data ();
2639 /* All further rtl allocation is now done in the current_obstack. */
2640 rtl_in_current_obstack ();
2644 /* Subroutine of instantiate_decls: Process all decls in the given
2645 BLOCK node and all its subblocks. */
2647 static void
2648 instantiate_decls_1 (let, valid_only)
2649 tree let;
2650 int valid_only;
2652 tree t;
2654 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2655 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2656 valid_only);
2658 /* Process all subblocks. */
2659 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2660 instantiate_decls_1 (t, valid_only);
2663 /* Subroutine of the preceding procedures: Given RTL representing a
2664 decl and the size of the object, do any instantiation required.
2666 If VALID_ONLY is non-zero, it means that the RTL should only be
2667 changed if the new address is valid. */
2669 static void
2670 instantiate_decl (x, size, valid_only)
2671 rtx x;
2672 int size;
2673 int valid_only;
2675 enum machine_mode mode;
2676 rtx addr;
2678 /* If this is not a MEM, no need to do anything. Similarly if the
2679 address is a constant or a register that is not a virtual register. */
2681 if (x == 0 || GET_CODE (x) != MEM)
2682 return;
2684 addr = XEXP (x, 0);
2685 if (CONSTANT_P (addr)
2686 || (GET_CODE (addr) == REG
2687 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2688 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2689 return;
2691 /* If we should only do this if the address is valid, copy the address.
2692 We need to do this so we can undo any changes that might make the
2693 address invalid. This copy is unfortunate, but probably can't be
2694 avoided. */
2696 if (valid_only)
2697 addr = copy_rtx (addr);
2699 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2701 if (valid_only)
2703 /* Now verify that the resulting address is valid for every integer or
2704 floating-point mode up to and including SIZE bytes long. We do this
2705 since the object might be accessed in any mode and frame addresses
2706 are shared. */
2708 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2709 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2710 mode = GET_MODE_WIDER_MODE (mode))
2711 if (! memory_address_p (mode, addr))
2712 return;
2714 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2715 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2716 mode = GET_MODE_WIDER_MODE (mode))
2717 if (! memory_address_p (mode, addr))
2718 return;
2721 /* Put back the address now that we have updated it and we either know
2722 it is valid or we don't care whether it is valid. */
2724 XEXP (x, 0) = addr;
2727 /* Given a pointer to a piece of rtx and an optional pointer to the
2728 containing object, instantiate any virtual registers present in it.
2730 If EXTRA_INSNS, we always do the replacement and generate
2731 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2732 is not valid.
2734 Return 1 if we either had nothing to do or if we were able to do the
2735 needed replacement. Return 0 otherwise; we only return zero if
2736 EXTRA_INSNS is zero.
2738 We first try some simple transformations to avoid the creation of extra
2739 pseudos. */
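/* For example (illustrative): (plus (reg virtual-stack-vars)
   (const_int 8)) can usually be rewritten in place as
   (plus frame_pointer_rtx (const_int 8 + var_offset)); a new pseudo is
   needed only if the rewritten insn fails to validate. */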
2741 static int
2742 instantiate_virtual_regs_1 (loc, object, extra_insns)
2743 rtx *loc;
2744 rtx object;
2745 int extra_insns;
2747 rtx x;
2748 RTX_CODE code;
2749 rtx new = 0;
2750 int offset;
2751 rtx temp;
2752 rtx seq;
2753 int i, j;
2754 char *fmt;
2756 /* Re-start here to avoid recursion in common cases. */
2757 restart:
2759 x = *loc;
2760 if (x == 0)
2761 return 1;
2763 code = GET_CODE (x);
2765 /* Check for some special cases. */
2766 switch (code)
2768 case CONST_INT:
2769 case CONST_DOUBLE:
2770 case CONST:
2771 case SYMBOL_REF:
2772 case CODE_LABEL:
2773 case PC:
2774 case CC0:
2775 case ASM_INPUT:
2776 case ADDR_VEC:
2777 case ADDR_DIFF_VEC:
2778 case RETURN:
2779 return 1;
2781 case SET:
2782 /* We are allowed to set the virtual registers. This means that
2783 the actual register should receive the source minus the
2784 appropriate offset. This is used, for example, in the handling
2785 of non-local gotos. */
2786 if (SET_DEST (x) == virtual_incoming_args_rtx)
2787 new = arg_pointer_rtx, offset = - in_arg_offset;
2788 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2789 new = frame_pointer_rtx, offset = - var_offset;
2790 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2791 new = stack_pointer_rtx, offset = - dynamic_offset;
2792 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2793 new = stack_pointer_rtx, offset = - out_arg_offset;
2795 if (new)
2797 /* The only valid sources here are PLUS or REG. Just do
2798 the simplest possible thing to handle them. */
2799 if (GET_CODE (SET_SRC (x)) != REG
2800 && GET_CODE (SET_SRC (x)) != PLUS)
2801 abort ();
2803 start_sequence ();
2804 if (GET_CODE (SET_SRC (x)) != REG)
2805 temp = force_operand (SET_SRC (x), NULL_RTX);
2806 else
2807 temp = SET_SRC (x);
2808 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2809 seq = get_insns ();
2810 end_sequence ();
2812 emit_insns_before (seq, object);
2813 SET_DEST (x) = new;
2815 if (!validate_change (object, &SET_SRC (x), temp, 0)
2816 || ! extra_insns)
2817 abort ();
2819 return 1;
2822 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2823 loc = &SET_SRC (x);
2824 goto restart;
2826 case PLUS:
2827 /* Handle special case of virtual register plus constant. */
2828 if (CONSTANT_P (XEXP (x, 1)))
2830 rtx old, new_offset;
2832 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2833 if (GET_CODE (XEXP (x, 0)) == PLUS)
2835 rtx inner = XEXP (XEXP (x, 0), 0);
2837 if (inner == virtual_incoming_args_rtx)
2838 new = arg_pointer_rtx, offset = in_arg_offset;
2839 else if (inner == virtual_stack_vars_rtx)
2840 new = frame_pointer_rtx, offset = var_offset;
2841 else if (inner == virtual_stack_dynamic_rtx)
2842 new = stack_pointer_rtx, offset = dynamic_offset;
2843 else if (inner == virtual_outgoing_args_rtx)
2844 new = stack_pointer_rtx, offset = out_arg_offset;
2845 else
2847 loc = &XEXP (x, 0);
2848 goto restart;
2851 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2852 extra_insns);
2853 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2856 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2857 new = arg_pointer_rtx, offset = in_arg_offset;
2858 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2859 new = frame_pointer_rtx, offset = var_offset;
2860 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2861 new = stack_pointer_rtx, offset = dynamic_offset;
2862 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2863 new = stack_pointer_rtx, offset = out_arg_offset;
2864 else
2866 /* We know the second operand is a constant. Unless the
2867 first operand is a REG (which has already been checked),
2868 it needs to be checked. */
2869 if (GET_CODE (XEXP (x, 0)) != REG)
2871 loc = &XEXP (x, 0);
2872 goto restart;
2874 return 1;
2877 new_offset = plus_constant (XEXP (x, 1), offset);
2879 /* If the new constant is zero, try to replace the sum with just
2880 the register. */
2881 if (new_offset == const0_rtx
2882 && validate_change (object, loc, new, 0))
2883 return 1;
2885 /* Next try to replace the register and new offset.
2886 There are two changes to validate here and we can't assume that
2887 in the case where the old offset equals the new one, just changing
2888 the register will yield a valid insn. In the interests of a little
2889 efficiency, however, we only call validate_change once (we don't queue up the
2890 changes and then call apply_change_group). */
2892 old = XEXP (x, 0);
2893 if (offset == 0
2894 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2895 : (XEXP (x, 0) = new,
2896 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2898 if (! extra_insns)
2900 XEXP (x, 0) = old;
2901 return 0;
2904 /* Otherwise copy the new constant into a register and replace
2905 the constant with that register. */
2906 temp = gen_reg_rtx (Pmode);
2907 XEXP (x, 0) = new;
2908 if (validate_change (object, &XEXP (x, 1), temp, 0))
2909 emit_insn_before (gen_move_insn (temp, new_offset), object);
2910 else
2912 /* If that didn't work, replace this expression with a
2913 register containing the sum. */
2915 XEXP (x, 0) = old;
2916 new = gen_rtx (PLUS, Pmode, new, new_offset);
2918 start_sequence ();
2919 temp = force_operand (new, NULL_RTX);
2920 seq = get_insns ();
2921 end_sequence ();
2923 emit_insns_before (seq, object);
2924 if (! validate_change (object, loc, temp, 0)
2925 && ! validate_replace_rtx (x, temp, object))
2926 abort ();
2930 return 1;
2933 /* Fall through to generic two-operand expression case. */
2934 case EXPR_LIST:
2935 case CALL:
2936 case COMPARE:
2937 case MINUS:
2938 case MULT:
2939 case DIV: case UDIV:
2940 case MOD: case UMOD:
2941 case AND: case IOR: case XOR:
2942 case ROTATERT: case ROTATE:
2943 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2944 case NE: case EQ:
2945 case GE: case GT: case GEU: case GTU:
2946 case LE: case LT: case LEU: case LTU:
2947 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2948 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2949 loc = &XEXP (x, 0);
2950 goto restart;
2952 case MEM:
2953 /* Most cases of MEM that convert to valid addresses have already been
2954 handled by our scan of decls. The only special handling we
2955 need here is to make a copy of the rtx to ensure it isn't being
2956 shared if we have to change it to a pseudo.
2958 If the rtx is a simple reference to an address via a virtual register,
2959 it can potentially be shared. In such cases, first try to make it
2960 a valid address, which can also be shared. Otherwise, copy it and
2961 proceed normally.
2963 First check for common cases that need no processing. These are
2964 usually due to instantiation already being done on a previous instance
2965 of a shared rtx. */
2967 temp = XEXP (x, 0);
2968 if (CONSTANT_ADDRESS_P (temp)
2969 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2970 || temp == arg_pointer_rtx
2971 #endif
2972 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2973 || temp == hard_frame_pointer_rtx
2974 #endif
2975 || temp == frame_pointer_rtx)
2976 return 1;
2978 if (GET_CODE (temp) == PLUS
2979 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2980 && (XEXP (temp, 0) == frame_pointer_rtx
2981 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2982 || XEXP (temp, 0) == hard_frame_pointer_rtx
2983 #endif
2984 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2985 || XEXP (temp, 0) == arg_pointer_rtx
2986 #endif
2988 return 1;
2990 if (temp == virtual_stack_vars_rtx
2991 || temp == virtual_incoming_args_rtx
2992 || (GET_CODE (temp) == PLUS
2993 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2994 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2995 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2997 /* This MEM may be shared. If the substitution can be done without
2998 the need to generate new pseudos, we want to do it in place
2999 so all copies of the shared rtx benefit. The call below will
3000 only make substitutions if the resulting address is still
3001 valid.
3003 Note that we cannot pass X as the object in the recursive call
3004 since the insn being processed may not allow all valid
3005 addresses. However, if we were not passed an object, we can
3006 only modify X without copying it if X will have a valid
3007 address.
3009 ??? Also note that this can still lose if OBJECT is an insn that
3010 has fewer restrictions on an address than some other insn.
3011 In that case, we will modify the shared address. This case
3012 doesn't seem very likely, though. One case where this could
3013 happen is in the case of a USE or CLOBBER reference, but we
3014 take care of that below. */
3016 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3017 object ? object : x, 0))
3018 return 1;
3020 /* Otherwise make a copy and process that copy. We copy the entire
3021 RTL expression since it might be a PLUS which could also be
3022 shared. */
3023 *loc = x = copy_rtx (x);
3026 /* Fall through to generic unary operation case. */
3027 case SUBREG:
3028 case STRICT_LOW_PART:
3029 case NEG: case NOT:
3030 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3031 case SIGN_EXTEND: case ZERO_EXTEND:
3032 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3033 case FLOAT: case FIX:
3034 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3035 case ABS:
3036 case SQRT:
3037 case FFS:
3038 /* These cases either have just one operand or we know that we need not
3039 check the rest of the operands. */
3040 loc = &XEXP (x, 0);
3041 goto restart;
3043 case USE:
3044 case CLOBBER:
3045 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3046 go ahead and make the invalid one, but do it to a copy. For a REG,
3047 just make the recursive call, since there's no chance of a problem. */
3049 if ((GET_CODE (XEXP (x, 0)) == MEM
3050 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3052 || (GET_CODE (XEXP (x, 0)) == REG
3053 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3054 return 1;
3056 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3057 loc = &XEXP (x, 0);
3058 goto restart;
3060 case REG:
3061 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3062 in front of this insn and substitute the temporary. */
3063 if (x == virtual_incoming_args_rtx)
3064 new = arg_pointer_rtx, offset = in_arg_offset;
3065 else if (x == virtual_stack_vars_rtx)
3066 new = frame_pointer_rtx, offset = var_offset;
3067 else if (x == virtual_stack_dynamic_rtx)
3068 new = stack_pointer_rtx, offset = dynamic_offset;
3069 else if (x == virtual_outgoing_args_rtx)
3070 new = stack_pointer_rtx, offset = out_arg_offset;
3072 if (new)
3074 temp = plus_constant (new, offset);
3075 if (!validate_change (object, loc, temp, 0))
3077 if (! extra_insns)
3078 return 0;
3080 start_sequence ();
3081 temp = force_operand (temp, NULL_RTX);
3082 seq = get_insns ();
3083 end_sequence ();
3085 emit_insns_before (seq, object);
3086 if (! validate_change (object, loc, temp, 0)
3087 && ! validate_replace_rtx (x, temp, object))
3088 abort ();
3092 return 1;
3095 /* Scan all subexpressions. */
3096 fmt = GET_RTX_FORMAT (code);
3097 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3098 if (*fmt == 'e')
3100 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3101 return 0;
3103 else if (*fmt == 'E')
3104 for (j = 0; j < XVECLEN (x, i); j++)
3105 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3106 extra_insns))
3107 return 0;
3109 return 1;
3112 /* Optimization: assuming this function does not receive nonlocal gotos,
3113 delete the handlers for such, as well as the insns to establish
3114 and disestablish them. */
3116 static void
3117 delete_handlers ()
3119 rtx insn;
3120 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3122 /* Delete the handler by turning off the flag that would
3123 prevent jump_optimize from deleting it.
3124 Also permit deletion of the nonlocal labels themselves
3125 if nothing local refers to them. */
3126 if (GET_CODE (insn) == CODE_LABEL)
3128 tree t, last_t;
3130 LABEL_PRESERVE_P (insn) = 0;
3132 /* Remove it from the nonlocal_label list, to avoid confusing
3133 flow. */
3134 for (t = nonlocal_labels, last_t = 0; t;
3135 last_t = t, t = TREE_CHAIN (t))
3136 if (DECL_RTL (TREE_VALUE (t)) == insn)
3137 break;
3138 if (t)
3140 if (! last_t)
3141 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3142 else
3143 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3146 if (GET_CODE (insn) == INSN
3147 && ((nonlocal_goto_handler_slot != 0
3148 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3149 || (nonlocal_goto_stack_level != 0
3150 && reg_mentioned_p (nonlocal_goto_stack_level,
3151 PATTERN (insn)))))
3152 delete_insn (insn);
3156 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3157 of the current function. */
3159 rtx
3160 nonlocal_label_rtx_list ()
3162 tree t;
3163 rtx x = 0;
3165 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3166 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3168 return x;
3171 /* Output a USE for any register use in RTL.
3172 This is used with -noreg to mark the extent of lifespan
3173 of any registers used in a user-visible variable's DECL_RTL. */
3175 void
3176 use_variable (rtl)
3177 rtx rtl;
3179 if (GET_CODE (rtl) == REG)
3180 /* This is a register variable. */
3181 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3182 else if (GET_CODE (rtl) == MEM
3183 && GET_CODE (XEXP (rtl, 0)) == REG
3184 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3185 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3186 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3187 /* This is a variable-sized structure. */
3188 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3191 /* Like use_variable except that it outputs the USEs after INSN
3192 instead of at the end of the insn-chain. */
3194 void
3195 use_variable_after (rtl, insn)
3196 rtx rtl, insn;
3198 if (GET_CODE (rtl) == REG)
3199 /* This is a register variable. */
3200 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3201 else if (GET_CODE (rtl) == MEM
3202 && GET_CODE (XEXP (rtl, 0)) == REG
3203 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3204 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3205 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3206 /* This is a variable-sized structure. */
3207 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3210 int
3211 max_parm_reg_num ()
3213 return max_parm_reg;
3216 /* Return the first insn following those generated by `assign_parms'. */
3218 rtx
3219 get_first_nonparm_insn ()
3221 if (last_parm_insn)
3222 return NEXT_INSN (last_parm_insn);
3223 return get_insns ();
3226 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3227 Crash if there is none. */
3229 rtx
3230 get_first_block_beg ()
3232 register rtx searcher;
3233 register rtx insn = get_first_nonparm_insn ();
3235 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3236 if (GET_CODE (searcher) == NOTE
3237 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3238 return searcher;
3240 abort (); /* Invalid call to this function. (See comments above.) */
3241 return NULL_RTX;
3244 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3245 This means a type for which function calls must pass an address to the
3246 function or get an address back from the function.
3247 EXP may be a type node or an expression (whose type is tested). */
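/* For example (target-dependent, illustrative): with
   -fpcc-struct-return every structure or union value is returned in
   memory, whereas a plain int, which fits in a call-clobbered return
   register, is not. */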
3249 int
3250 aggregate_value_p (exp)
3251 tree exp;
3253 int i, regno, nregs;
3254 rtx reg;
3255 tree type;
3256 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3257 type = exp;
3258 else
3259 type = TREE_TYPE (exp);
3261 if (RETURN_IN_MEMORY (type))
3262 return 1;
3263 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3264 and thus can't be returned in registers. */
3265 if (TREE_ADDRESSABLE (type))
3266 return 1;
3267 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3268 return 1;
3269 /* Make sure we have suitable call-clobbered regs to return
3270 the value in; if not, we must return it in memory. */
3271 reg = hard_function_value (type, 0);
3273 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3274 it is OK. */
3275 if (GET_CODE (reg) != REG)
3276 return 0;
3278 regno = REGNO (reg);
3279 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3280 for (i = 0; i < nregs; i++)
3281 if (! call_used_regs[regno + i])
3282 return 1;
3283 return 0;
3286 /* Assign RTL expressions to the function's parameters.
3287 This may involve copying them into registers and using
3288 those registers as the RTL for them.
3290 If SECOND_TIME is non-zero it means that this function is being
3291 called a second time. This is done by integrate.c when a function's
3292 compilation is deferred. We need to come back here in case the
3293 FUNCTION_ARG macro computes items needed for the rest of the compilation
3294 (such as changing which registers are fixed or caller-saved). But suppress
3295 writing any insns or setting DECL_RTL of anything in this case. */
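/* In rough outline: for each parm, the loop below computes ENTRY_PARM,
   the register or stack slot in which the argument arrives, and
   STACK_PARM, its preallocated stack home if any, and then decides
   which of the two the compiled function body will actually use. */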
3297 void
3298 assign_parms (fndecl, second_time)
3299 tree fndecl;
3300 int second_time;
3302 register tree parm;
3303 register rtx entry_parm = 0;
3304 register rtx stack_parm = 0;
3305 CUMULATIVE_ARGS args_so_far;
3306 enum machine_mode promoted_mode, passed_mode;
3307 enum machine_mode nominal_mode, promoted_nominal_mode;
3308 int unsignedp;
3309 /* Total space needed so far for args on the stack,
3310 given as a constant and a tree-expression. */
3311 struct args_size stack_args_size;
3312 tree fntype = TREE_TYPE (fndecl);
3313 tree fnargs = DECL_ARGUMENTS (fndecl);
3314 /* This is used for the arg pointer when referring to stack args. */
3315 rtx internal_arg_pointer;
3316 /* This is a dummy PARM_DECL that we use for the function result if
3317 the function returns a structure. */
3318 tree function_result_decl = 0;
3319 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3320 int varargs_setup = 0;
3321 rtx conversion_insns = 0;
3323 /* Nonzero if the last arg is named `__builtin_va_alist',
3324 which is used on some machines for old-fashioned non-ANSI varargs.h;
3325 this should be stuck onto the stack as if it had arrived there. */
3326 int hide_last_arg
3327 = (current_function_varargs
3328 && fnargs
3329 && (parm = tree_last (fnargs)) != 0
3330 && DECL_NAME (parm)
3331 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3332 "__builtin_va_alist")));
3334 /* Nonzero if function takes extra anonymous args.
3335 This means the last named arg must be on the stack
3336 right before the anonymous ones. */
3337 int stdarg
3338 = (TYPE_ARG_TYPES (fntype) != 0
3339 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3340 != void_type_node));
3342 current_function_stdarg = stdarg;
3344 /* If the reg that the virtual arg pointer will be translated into is
3345 not a fixed reg or is the stack pointer, make a copy of the virtual
3346 arg pointer, and address parms via the copy. The frame pointer is
3347 considered fixed even though it is not marked as such.
3349 The second time through, simply use ap to avoid generating rtx. */
3351 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3352 || ! (fixed_regs[ARG_POINTER_REGNUM]
3353 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3354 && ! second_time)
3355 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3356 else
3357 internal_arg_pointer = virtual_incoming_args_rtx;
3358 current_function_internal_arg_pointer = internal_arg_pointer;
3360 stack_args_size.constant = 0;
3361 stack_args_size.var = 0;
3363 /* If struct value address is treated as the first argument, make it so. */
3364 if (aggregate_value_p (DECL_RESULT (fndecl))
3365 && ! current_function_returns_pcc_struct
3366 && struct_value_incoming_rtx == 0)
3368 tree type = build_pointer_type (TREE_TYPE (fntype));
3370 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3372 DECL_ARG_TYPE (function_result_decl) = type;
3373 TREE_CHAIN (function_result_decl) = fnargs;
3374 fnargs = function_result_decl;
3377 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3378 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3380 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3381 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3382 #else
3383 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3384 #endif
3386 /* We haven't yet found an argument that we must push and pretend the
3387 caller did. */
3388 current_function_pretend_args_size = 0;
3390 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3392 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3393 struct args_size stack_offset;
3394 struct args_size arg_size;
3395 int passed_pointer = 0;
3396 int did_conversion = 0;
3397 tree passed_type = DECL_ARG_TYPE (parm);
3398 tree nominal_type = TREE_TYPE (parm);
3400 /* Set LAST_NAMED if this is last named arg before some
3401 anonymous args. We treat it as if it were anonymous too. */
3402 int last_named = ((TREE_CHAIN (parm) == 0
3403 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3404 && (stdarg || current_function_varargs));
3406 if (TREE_TYPE (parm) == error_mark_node
3407 /* This can happen after weird syntax errors
3408 or if an enum type is defined among the parms. */
3409 || TREE_CODE (parm) != PARM_DECL
3410 || passed_type == NULL)
3412 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3413 const0_rtx);
3414 TREE_USED (parm) = 1;
3415 continue;
3418 /* For a varargs.h function, save info about regs and stack space
3419 used by the individual args, not including the va_alist arg. */
3420 if (hide_last_arg && last_named)
3421 current_function_args_info = args_so_far;
3423 /* Find mode of arg as it is passed, and mode of arg
3424 as it should be during execution of this function. */
3425 passed_mode = TYPE_MODE (passed_type);
3426 nominal_mode = TYPE_MODE (nominal_type);
3428 /* If the parm's mode is VOID, its value doesn't matter,
3429 so avoid the usual things like emit_move_insn that could crash. */
3430 if (nominal_mode == VOIDmode)
3432 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3433 continue;
3436 /* If the parm is to be passed as a transparent union, use the
3437 type of the first field for the tests below. We have already
3438 verified that the modes are the same. */
3439 if (DECL_TRANSPARENT_UNION (parm)
3440 || TYPE_TRANSPARENT_UNION (passed_type))
3441 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3443 /* See if this arg was passed by invisible reference. It is if
3444 it is an object whose size depends on the contents of the
3445 object itself or if the machine requires these objects be passed
3446 that way. */
3448 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3449 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3450 || TREE_ADDRESSABLE (passed_type)
3451 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3452 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3453 passed_type, ! last_named)
3454 #endif
3457 passed_type = nominal_type = build_pointer_type (passed_type);
3458 passed_pointer = 1;
3459 passed_mode = nominal_mode = Pmode;
3462 promoted_mode = passed_mode;
3464 #ifdef PROMOTE_FUNCTION_ARGS
3465 /* Compute the mode to which the arg is actually extended. */
3466 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3467 #endif
3469 /* Let machine desc say which reg (if any) the parm arrives in.
3470 0 means it arrives on the stack. */
3471 #ifdef FUNCTION_INCOMING_ARG
3472 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3473 passed_type, ! last_named);
3474 #else
3475 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3476 passed_type, ! last_named);
3477 #endif
3479 if (entry_parm == 0)
3480 promoted_mode = passed_mode;
3482 #ifdef SETUP_INCOMING_VARARGS
3483 /* If this is the last named parameter, do any required setup for
3484 varargs or stdargs. We need to know about the case of this being an
3485 addressable type, in which case we skip the registers it
3486 would have arrived in.
3488 For stdargs, LAST_NAMED will be set for two parameters, the one that
3489 is actually the last named, and the dummy parameter. We only
3490 want to do this action once.
3492 Also, indicate when RTL generation is to be suppressed. */
3493 if (last_named && !varargs_setup)
3495 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3496 current_function_pretend_args_size,
3497 second_time);
3498 varargs_setup = 1;
3500 #endif
3502 /* Determine parm's home in the stack,
3503 in case it arrives in the stack or we should pretend it did.
3505 Compute the stack position and rtx where the argument arrives
3506 and its size.
3508 There is one complexity here: If this was a parameter that would
3509 have been passed in registers, but wasn't only because it is
3510 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3511 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3512 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3513 0 as it was the previous time. */
3515 locate_and_pad_parm (promoted_mode, passed_type,
3516 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3517 1,
3518 #else
3519 #ifdef FUNCTION_INCOMING_ARG
3520 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3521 passed_type,
3522 (! last_named
3523 || varargs_setup)) != 0,
3524 #else
3525 FUNCTION_ARG (args_so_far, promoted_mode,
3526 passed_type,
3527 ! last_named || varargs_setup) != 0,
3528 #endif
3529 #endif
3530 fndecl, &stack_args_size, &stack_offset, &arg_size);
3532 if (! second_time)
3534 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3536 if (offset_rtx == const0_rtx)
3537 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3538 else
3539 stack_parm = gen_rtx (MEM, promoted_mode,
3540 gen_rtx (PLUS, Pmode,
3541 internal_arg_pointer, offset_rtx));
3543 /* If this is a memory ref that contains aggregate components,
3544 mark it as such for cse and loop optimize. Likewise if it
3545 is readonly. */
3546 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3547 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3550 /* If this parameter was passed both in registers and in the stack,
3551 use the copy on the stack. */
3552 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3553 entry_parm = 0;
3555 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3556 /* If this parm was passed part in regs and part in memory,
3557 pretend it arrived entirely in memory
3558 by pushing the register-part onto the stack.
3560 In the special case of a DImode or DFmode that is split,
3561 we could put it together in a pseudoreg directly,
3562 but for now that's not worth bothering with. */
3564 if (entry_parm)
3566 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3567 passed_type, ! last_named);
3569 if (nregs > 0)
3571 current_function_pretend_args_size
3572 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3573 / (PARM_BOUNDARY / BITS_PER_UNIT)
3574 * (PARM_BOUNDARY / BITS_PER_UNIT));
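/* Worked example with illustrative values: if UNITS_PER_WORD is 4,
   PARM_BOUNDARY is 64 and NREGS is 3, the register part occupies
   12 bytes, which the expression above rounds up to the next 8-byte
   parm boundary, giving a pretend arg size of 16.  */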
3576 if (! second_time)
3578 /* Handle calls that pass values in multiple non-contiguous
3579 locations. The Irix 6 ABI has examples of this. */
3580 if (GET_CODE (entry_parm) == PARALLEL)
3581 emit_group_store (validize_mem (stack_parm),
3582 entry_parm);
3583 else
3584 move_block_from_reg (REGNO (entry_parm),
3585 validize_mem (stack_parm), nregs,
3586 int_size_in_bytes (TREE_TYPE (parm)));
3588 entry_parm = stack_parm;
3591 #endif
3593 /* If we didn't decide this parm came in a register,
3594 by default it came on the stack. */
3595 if (entry_parm == 0)
3596 entry_parm = stack_parm;
3598 /* Record permanently how this parm was passed. */
3599 if (! second_time)
3600 DECL_INCOMING_RTL (parm) = entry_parm;
3602 /* If there is actually space on the stack for this parm,
3603 count it in stack_args_size; otherwise set stack_parm to 0
3604 to indicate there is no preallocated stack slot for the parm. */
3606 if (entry_parm == stack_parm
3607 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3608 /* On some machines, even if a parm value arrives in a register
3609 there is still an (uninitialized) stack slot allocated for it.
3611 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3612 whether this parameter already has a stack slot allocated,
3613 because an arg block exists only if current_function_args_size
3614 is larger than some threshold, and we haven't calculated that
3615 yet. So, for now, we just assume that stack slots never exist
3616 in this case. */
3617 || REG_PARM_STACK_SPACE (fndecl) > 0
3618 #endif
3621 stack_args_size.constant += arg_size.constant;
3622 if (arg_size.var)
3623 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3625 else
3626 /* No stack slot was pushed for this parm. */
3627 stack_parm = 0;
3629 /* Update info on where next arg arrives in registers. */
3631 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3632 passed_type, ! last_named);
3634 /* If this is our second time through, we are done with this parm. */
3635 if (second_time)
3636 continue;
3638 /* If we can't trust the parm stack slot to be aligned enough
3639 for its ultimate type, don't use that slot after entry.
3640 We'll make another stack slot, if we need one. */
3642 int thisparm_boundary
3643 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3645 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3646 stack_parm = 0;
3649 /* If parm was passed in memory, and we need to convert it on entry,
3650 don't store it back in that same slot. */
3651 if (entry_parm != 0
3652 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3653 stack_parm = 0;
3655 #if 0
3656 /* Now adjust STACK_PARM to the mode and precise location
3657 where this parameter should live during execution,
3658 if we discover that it must live in the stack during execution.
3659 To make debuggers happier on big-endian machines, we store
3660 the value in the last bytes of the space available. */
3662 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3663 && stack_parm != 0)
3665 rtx offset_rtx;
3667 if (BYTES_BIG_ENDIAN
3668 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3669 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3670 - GET_MODE_SIZE (nominal_mode));
3672 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3673 if (offset_rtx == const0_rtx)
3674 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3675 else
3676 stack_parm = gen_rtx (MEM, nominal_mode,
3677 gen_rtx (PLUS, Pmode,
3678 internal_arg_pointer, offset_rtx));
3680 /* If this is a memory ref that contains aggregate components,
3681 mark it as such for cse and loop optimize. */
3682 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3684 #endif /* 0 */
3686 #ifdef STACK_REGS
3687 /* We need this "use" info, because the gcc-register->stack-register
3688 converter in reg-stack.c needs to know which registers are active
3689 at the start of the function call. The actual parameter loading
3690 instructions are not always still available by then, since they might
3691 have been optimized away. */
3693 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3694 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3695 #endif
3697 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3698 in the mode in which it arrives.
3699 STACK_PARM is an RTX for a stack slot where the parameter can live
3700 during the function (in case we want to put it there).
3701 STACK_PARM is 0 if no stack slot was pushed for it.
3703 Now output code if necessary to convert ENTRY_PARM to
3704 the type in which this function declares it,
3705 and store that result in an appropriate place,
3706 which may be a pseudo reg, may be STACK_PARM,
3707 or may be a local stack slot if STACK_PARM is 0.
3709 Set DECL_RTL to that place. */
3711 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3713 /* If a BLKmode arg arrives in registers, copy it to a stack slot.
3714 Handle calls that pass values in multiple non-contiguous
3715 locations. The Irix 6 ABI has examples of this. */
3716 if (GET_CODE (entry_parm) == REG
3717 || GET_CODE (entry_parm) == PARALLEL)
3719 int size_stored
3720 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3721 UNITS_PER_WORD);
3723 /* Note that we will be storing an integral number of words.
3724 So we have to be careful to ensure that we allocate an
3725 integral number of words. We do this below in the
3726 assign_stack_local if space was not allocated in the argument
3727 list. If it was, this will not work if PARM_BOUNDARY is not
3728 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3729 if it becomes a problem. */
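/* Worked example: with UNITS_PER_WORD == 4, a 10-byte BLKmode parm
   yields SIZE_STORED == 12, so three whole words are copied.  */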
3731 if (stack_parm == 0)
3733 stack_parm
3734 = assign_stack_local (GET_MODE (entry_parm),
3735 size_stored, 0);
3737 /* If this is a memory ref that contains aggregate
3738 components, mark it as such for cse and loop optimize. */
3739 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3742 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3743 abort ();
3745 if (TREE_READONLY (parm))
3746 RTX_UNCHANGING_P (stack_parm) = 1;
3748 /* Handle calls that pass values in multiple non-contiguous
3749 locations. The Irix 6 ABI has examples of this. */
3750 if (GET_CODE (entry_parm) == PARALLEL)
3751 emit_group_store (validize_mem (stack_parm), entry_parm);
3752 else
3753 move_block_from_reg (REGNO (entry_parm),
3754 validize_mem (stack_parm),
3755 size_stored / UNITS_PER_WORD,
3756 int_size_in_bytes (TREE_TYPE (parm)));
3758 DECL_RTL (parm) = stack_parm;
3760 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3761 && ! DECL_INLINE (fndecl))
3762 /* layout_decl may set this. */
3763 || TREE_ADDRESSABLE (parm)
3764 || TREE_SIDE_EFFECTS (parm)
3765 /* If -ffloat-store specified, don't put explicit
3766 float variables into registers. */
3767 || (flag_float_store
3768 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3769 /* Always assign pseudo to structure return or item passed
3770 by invisible reference. */
3771 || passed_pointer || parm == function_result_decl)
3773 /* Store the parm in a pseudoregister during the function, but we
3774 may need to do it in a wider mode. */
3776 register rtx parmreg;
3777 int regno, regnoi, regnor;
3779 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3781 promoted_nominal_mode
3782 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3784 parmreg = gen_reg_rtx (promoted_nominal_mode);
3785 mark_user_reg (parmreg);
3787 /* If this was an item that we received a pointer to, set DECL_RTL
3788 appropriately. */
3789 if (passed_pointer)
3791 DECL_RTL (parm)
3792 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3793 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3795 else
3796 DECL_RTL (parm) = parmreg;
3798 /* Copy the value into the register. */
3799 if (nominal_mode != passed_mode
3800 || promoted_nominal_mode != promoted_mode)
3802 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3803 mode, by the caller. We now have to convert it to
3804 NOMINAL_MODE, if different. However, PARMREG may be in
3805 a different mode from NOMINAL_MODE if it is being stored
3806 promoted.
3808 If ENTRY_PARM is a hard register, it might be in a register
3809 not valid for operating in its mode (e.g., an odd-numbered
3810 register for a DFmode). In that case, moves are the only
3811 thing valid, so we can't do a convert from there. This
3812 occurs when the calling sequence allows such misaligned
3813 usage.
3815 In addition, the conversion may involve a call, which could
3816 clobber parameters which haven't been copied to pseudo
3817 registers yet. Therefore, we must first copy the parm to
3818 a pseudo reg here, and save the conversion until after all
3819 parameters have been moved. */
3821 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3823 emit_move_insn (tempreg, validize_mem (entry_parm));
3825 push_to_sequence (conversion_insns);
3826 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3828 expand_assignment (parm,
3829 make_tree (nominal_type, tempreg), 0, 0);
3830 conversion_insns = get_insns ();
3831 did_conversion = 1;
3832 end_sequence ();
3834 else
3835 emit_move_insn (parmreg, validize_mem (entry_parm));
3837 /* If we were passed a pointer but the actual value
3838 can safely live in a register, put it in one. */
3839 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3840 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3841 && ! DECL_INLINE (fndecl))
3842 /* layout_decl may set this. */
3843 || TREE_ADDRESSABLE (parm)
3844 || TREE_SIDE_EFFECTS (parm)
3845 /* If -ffloat-store specified, don't put explicit
3846 float variables into registers. */
3847 || (flag_float_store
3848 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3850 /* We can't use nominal_mode, because it will have been set to
3851 Pmode above. We must use the actual mode of the parm. */
3852 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3853 mark_user_reg (parmreg);
3854 emit_move_insn (parmreg, DECL_RTL (parm));
3855 DECL_RTL (parm) = parmreg;
3856 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3857 now the parm. */
3858 stack_parm = 0;
3860 #ifdef FUNCTION_ARG_CALLEE_COPIES
3861 /* If we are passed an arg by reference and it is our responsibility
3862 to make a copy, do it now.
3863 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3864 original argument, so we must recreate them in the call to
3865 FUNCTION_ARG_CALLEE_COPIES. */
3866 /* ??? Later, add code to avoid making the copy when the argument
3867 isn't modified. */
3869 else if (passed_pointer
3870 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3871 TYPE_MODE (DECL_ARG_TYPE (parm)),
3872 DECL_ARG_TYPE (parm),
3873 ! last_named)
3874 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3876 rtx copy;
3877 tree type = DECL_ARG_TYPE (parm);
3879 /* This sequence may involve a library call perhaps clobbering
3880 registers that haven't been copied to pseudos yet. */
3882 push_to_sequence (conversion_insns);
3884 if (TYPE_SIZE (type) == 0
3885 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3886 /* This is a variable sized object. */
3887 copy = gen_rtx (MEM, BLKmode,
3888 allocate_dynamic_stack_space
3889 (expr_size (parm), NULL_RTX,
3890 TYPE_ALIGN (type)));
3891 else
3892 copy = assign_stack_temp (TYPE_MODE (type),
3893 int_size_in_bytes (type), 1);
3894 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3896 store_expr (parm, copy, 0);
3897 emit_move_insn (parmreg, XEXP (copy, 0));
3898 conversion_insns = get_insns ();
3899 did_conversion = 1;
3900 end_sequence ();
3902 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3904 /* In any case, record the parm's desired stack location
3905 in case we later discover it must live in the stack.
3907 If it is a COMPLEX value, store the stack location for both
3908 halves. */
3910 if (GET_CODE (parmreg) == CONCAT)
3911 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3912 else
3913 regno = REGNO (parmreg);
3915 if (regno >= nparmregs)
3917 rtx *new;
3918 int old_nparmregs = nparmregs;
3920 nparmregs = regno + 5;
3921 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3922 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3923 old_nparmregs * sizeof (rtx));
3924 bzero ((char *) (new + old_nparmregs),
3925 (nparmregs - old_nparmregs) * sizeof (rtx));
3926 parm_reg_stack_loc = new;
3929 if (GET_CODE (parmreg) == CONCAT)
3931 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3933 regnor = REGNO (gen_realpart (submode, parmreg));
3934 regnoi = REGNO (gen_imagpart (submode, parmreg));
3936 if (stack_parm != 0)
3938 parm_reg_stack_loc[regnor]
3939 = gen_realpart (submode, stack_parm);
3940 parm_reg_stack_loc[regnoi]
3941 = gen_imagpart (submode, stack_parm);
3943 else
3945 parm_reg_stack_loc[regnor] = 0;
3946 parm_reg_stack_loc[regnoi] = 0;
3949 else
3950 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3952 /* Mark the register as eliminable if we did no conversion
3953 and it was copied from memory at a fixed offset,
3954 and the arg pointer was not copied to a pseudo-reg.
3955 If the arg pointer is a pseudo reg or the offset formed
3956 an invalid address, such memory-equivalences
3957 as we make here would screw up life analysis for it. */
3958 if (nominal_mode == passed_mode
3959 && ! did_conversion
3960 && stack_parm != 0
3961 && GET_CODE (stack_parm) == MEM
3962 && stack_offset.var == 0
3963 && reg_mentioned_p (virtual_incoming_args_rtx,
3964 XEXP (stack_parm, 0)))
3966 rtx linsn = get_last_insn ();
3967 rtx sinsn, set;
3969 /* Mark complex types separately. */
3970 if (GET_CODE (parmreg) == CONCAT)
3971 /* Scan backwards for the set of the real and
3972 imaginary parts. */
3973 for (sinsn = linsn; sinsn != 0;
3974 sinsn = prev_nonnote_insn (sinsn))
3976 set = single_set (sinsn);
3977 if (set != 0
3978 && SET_DEST (set) == regno_reg_rtx [regnoi])
3979 REG_NOTES (sinsn)
3980 = gen_rtx (EXPR_LIST, REG_EQUIV,
3981 parm_reg_stack_loc[regnoi],
3982 REG_NOTES (sinsn));
3983 else if (set != 0
3984 && SET_DEST (set) == regno_reg_rtx [regnor])
3985 REG_NOTES (sinsn)
3986 = gen_rtx (EXPR_LIST, REG_EQUIV,
3987 parm_reg_stack_loc[regnor],
3988 REG_NOTES (sinsn));
3990 else if ((set = single_set (linsn)) != 0
3991 && SET_DEST (set) == parmreg)
3992 REG_NOTES (linsn)
3993 = gen_rtx (EXPR_LIST, REG_EQUIV,
3994 stack_parm, REG_NOTES (linsn));
3997 /* For pointer data type, suggest pointer register. */
3998 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3999 mark_reg_pointer (parmreg,
4000 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4001 / BITS_PER_UNIT));
4003 else
4005 /* Value must be stored in the stack slot STACK_PARM
4006 during function execution. */
4008 if (promoted_mode != nominal_mode)
4010 /* Conversion is required. */
4011 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4013 emit_move_insn (tempreg, validize_mem (entry_parm));
4015 push_to_sequence (conversion_insns);
4016 entry_parm = convert_to_mode (nominal_mode, tempreg,
4017 TREE_UNSIGNED (TREE_TYPE (parm)));
4018 conversion_insns = get_insns ();
4019 did_conversion = 1;
4020 end_sequence ();
4023 if (entry_parm != stack_parm)
4025 if (stack_parm == 0)
4027 stack_parm
4028 = assign_stack_local (GET_MODE (entry_parm),
4029 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4030 /* If this is a memory ref that contains aggregate components,
4031 mark it as such for cse and loop optimize. */
4032 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4035 if (promoted_mode != nominal_mode)
4037 push_to_sequence (conversion_insns);
4038 emit_move_insn (validize_mem (stack_parm),
4039 validize_mem (entry_parm));
4040 conversion_insns = get_insns ();
4041 end_sequence ();
4043 else
4044 emit_move_insn (validize_mem (stack_parm),
4045 validize_mem (entry_parm));
4048 DECL_RTL (parm) = stack_parm;
4051 /* If this "parameter" was the place where we are receiving the
4052 function's incoming structure pointer, set up the result. */
4053 if (parm == function_result_decl)
4055 tree result = DECL_RESULT (fndecl);
4056 tree restype = TREE_TYPE (result);
4058 DECL_RTL (result)
4059 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
4061 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4064 if (TREE_THIS_VOLATILE (parm))
4065 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4066 if (TREE_READONLY (parm))
4067 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4070 /* Output all parameter conversion instructions (possibly including calls)
4071 now that all parameters have been copied out of hard registers. */
4072 emit_insns (conversion_insns);
4074 max_parm_reg = max_reg_num ();
4075 last_parm_insn = get_last_insn ();
4077 current_function_args_size = stack_args_size.constant;
4079 /* Adjust function incoming argument size for alignment and
4080 minimum length. */
4082 #ifdef REG_PARM_STACK_SPACE
4083 #ifndef MAYBE_REG_PARM_STACK_SPACE
4084 current_function_args_size = MAX (current_function_args_size,
4085 REG_PARM_STACK_SPACE (fndecl));
4086 #endif
4087 #endif
4089 #ifdef STACK_BOUNDARY
4090 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4092 current_function_args_size
4093 = ((current_function_args_size + STACK_BYTES - 1)
4094 / STACK_BYTES) * STACK_BYTES;
4095 #endif
4097 #ifdef ARGS_GROW_DOWNWARD
4098 current_function_arg_offset_rtx
4099 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4100 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4101 size_int (-stack_args_size.constant)),
4102 NULL_RTX, VOIDmode, 0));
4103 #else
4104 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4105 #endif
4107 /* See how many bytes, if any, of its args a function should try to pop
4108 on return. */
4110 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4111 current_function_args_size);
4113 /* For a stdarg.h function, save info about
4114 regs and stack space used by the named args. */
4116 if (!hide_last_arg)
4117 current_function_args_info = args_so_far;
4119 /* Set the rtx used for the function return value. Put this in its
4120 own variable so any optimizers that need this information don't have
4121 to include tree.h. Do this here so it gets done when an inlined
4122 function gets output. */
4124 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4127 /* Indicate whether REGNO is an incoming argument to the current function
4128 that was promoted to a wider mode. If so, return the RTX for the
4129 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4130 that REGNO is promoted from and whether the promotion was signed or
4131 unsigned. */
4133 #ifdef PROMOTE_FUNCTION_ARGS
4136 promoted_input_arg (regno, pmode, punsignedp)
4137 int regno;
4138 enum machine_mode *pmode;
4139 int *punsignedp;
4141 tree arg;
4143 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4144 arg = TREE_CHAIN (arg))
4145 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4146 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4147 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4149 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4150 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4152 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4153 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4154 && mode != DECL_MODE (arg))
4156 *pmode = DECL_MODE (arg);
4157 *punsignedp = unsignedp;
4158 return DECL_INCOMING_RTL (arg);
4162 return 0;
4165 #endif
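/* For illustration, on a hypothetical promoting target: if
   PROMOTE_FUNCTION_ARGS widens a `short' argument to SImode, then for
   the hard register it arrives in, promoted_input_arg returns the
   incoming RTL and sets *PMODE to HImode and *PUNSIGNEDP to the
   signedness of `short'.  */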
4167 /* Compute the size and offset from the start of the stacked arguments for a
4168 parm passed in mode PASSED_MODE and with type TYPE.
4170 INITIAL_OFFSET_PTR points to the current offset into the stacked
4171 arguments.
4173 The starting offset and size for this parm are returned in *OFFSET_PTR
4174 and *ARG_SIZE_PTR, respectively.
4176 IN_REGS is non-zero if the argument will be passed in registers. It will
4177 never be set if REG_PARM_STACK_SPACE is not defined.
4179 FNDECL is the function in which the argument was defined.
4181 There are two types of rounding that are done. The first, controlled by
4182 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4183 list to be aligned to the specific boundary (in bits). This rounding
4184 affects the initial and starting offsets, but not the argument size.
4186 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4187 optionally rounds the size of the parm to PARM_BOUNDARY. The
4188 initial offset is not affected by this rounding, while the size always
4189 is and the starting offset may be. */
4191 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4192 initial_offset_ptr is positive because locate_and_pad_parm's
4193 callers pass in the total size of args so far as
4194 initial_offset_ptr. arg_size_ptr is always positive. */
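/* Worked example with illustrative values, args growing upward:
   suppose FUNCTION_ARG_BOUNDARY is 64 bits and PARM_BOUNDARY is 32
   bits.  For a 6-byte parm arriving when *INITIAL_OFFSET_PTR is 4,
   the first rounding moves the starting offset up to 8 without
   touching the size; the second rounds the size from 6 up to 8, so
   the following parm starts at offset 16.  */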
4196 void
4197 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4198 initial_offset_ptr, offset_ptr, arg_size_ptr)
4199 enum machine_mode passed_mode;
4200 tree type;
4201 int in_regs;
4202 tree fndecl;
4203 struct args_size *initial_offset_ptr;
4204 struct args_size *offset_ptr;
4205 struct args_size *arg_size_ptr;
4207 tree sizetree
4208 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4209 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4210 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4211 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4212 int reg_parm_stack_space = 0;
4214 #ifdef REG_PARM_STACK_SPACE
4215 /* If we have found a stack parm before we reach the end of the
4216 area reserved for registers, skip that area. */
4217 if (! in_regs)
4219 #ifdef MAYBE_REG_PARM_STACK_SPACE
4220 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4221 #else
4222 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4223 #endif
4224 if (reg_parm_stack_space > 0)
4226 if (initial_offset_ptr->var)
4228 initial_offset_ptr->var
4229 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4230 size_int (reg_parm_stack_space));
4231 initial_offset_ptr->constant = 0;
4233 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4234 initial_offset_ptr->constant = reg_parm_stack_space;
4237 #endif /* REG_PARM_STACK_SPACE */
4239 arg_size_ptr->var = 0;
4240 arg_size_ptr->constant = 0;
4242 #ifdef ARGS_GROW_DOWNWARD
4243 if (initial_offset_ptr->var)
4245 offset_ptr->constant = 0;
4246 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4247 initial_offset_ptr->var);
4249 else
4251 offset_ptr->constant = - initial_offset_ptr->constant;
4252 offset_ptr->var = 0;
4254 if (where_pad != none
4255 && (TREE_CODE (sizetree) != INTEGER_CST
4256 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4257 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4258 SUB_PARM_SIZE (*offset_ptr, sizetree);
4259 if (where_pad != downward)
4260 pad_to_arg_alignment (offset_ptr, boundary);
4261 if (initial_offset_ptr->var)
4263 arg_size_ptr->var = size_binop (MINUS_EXPR,
4264 size_binop (MINUS_EXPR,
4265 integer_zero_node,
4266 initial_offset_ptr->var),
4267 offset_ptr->var);
4269 else
4271 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4272 offset_ptr->constant);
4274 #else /* !ARGS_GROW_DOWNWARD */
4275 pad_to_arg_alignment (initial_offset_ptr, boundary);
4276 *offset_ptr = *initial_offset_ptr;
4278 #ifdef PUSH_ROUNDING
4279 if (passed_mode != BLKmode)
4280 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4281 #endif
4283 /* Pad_below needs the pre-rounded size to know how much to pad below,
4284 so this must be done before rounding up. */
4285 if (where_pad == downward
4286 /* However, BLKmode args passed in regs have their padding done elsewhere.
4287 The stack slot must be able to hold the entire register. */
4288 && !(in_regs && passed_mode == BLKmode))
4289 pad_below (offset_ptr, passed_mode, sizetree);
4291 if (where_pad != none
4292 && (TREE_CODE (sizetree) != INTEGER_CST
4293 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4294 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4296 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4297 #endif /* ARGS_GROW_DOWNWARD */
4300 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4301 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
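/* For example, with BOUNDARY == 64 bits a constant offset of 5 bytes
   is rounded up to 8, or down to 0 when args grow downward.  */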
4303 static void
4304 pad_to_arg_alignment (offset_ptr, boundary)
4305 struct args_size *offset_ptr;
4306 int boundary;
4308 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4310 if (boundary > BITS_PER_UNIT)
4312 if (offset_ptr->var)
4314 offset_ptr->var =
4315 #ifdef ARGS_GROW_DOWNWARD
4316 round_down
4317 #else
4318 round_up
4319 #endif
4320 (ARGS_SIZE_TREE (*offset_ptr),
4321 boundary / BITS_PER_UNIT);
4322 offset_ptr->constant = 0; /*?*/
4324 else
4325 offset_ptr->constant =
4326 #ifdef ARGS_GROW_DOWNWARD
4327 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4328 #else
4329 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4330 #endif
4334 static void
4335 pad_below (offset_ptr, passed_mode, sizetree)
4336 struct args_size *offset_ptr;
4337 enum machine_mode passed_mode;
4338 tree sizetree;
4340 if (passed_mode != BLKmode)
4342 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4343 offset_ptr->constant
4344 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4345 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4346 - GET_MODE_SIZE (passed_mode));
4348 else
4350 if (TREE_CODE (sizetree) != INTEGER_CST
4351 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4353 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4354 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4355 /* Add it in. */
4356 ADD_PARM_SIZE (*offset_ptr, s2);
4357 SUB_PARM_SIZE (*offset_ptr, sizetree);
4362 static tree
4363 round_down (value, divisor)
4364 tree value;
4365 int divisor;
4367 return size_binop (MULT_EXPR,
4368 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4369 size_int (divisor));
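/* For example, round_down (value, 4) on a size tree for 10 yields a
   tree for (10 / 4) * 4 == 8.  Unlike FLOOR_ROUND, this works even
   when DIVISOR is not known to be a power of two.  */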
4372 /* Walk the tree of blocks describing the binding levels within a function
4373 and warn about uninitialized variables.
4374 This is done after calling flow_analysis and before global_alloc
4375 clobbers the pseudo-regs to hard regs. */
4377 void
4378 uninitialized_vars_warning (block)
4379 tree block;
4381 register tree decl, sub;
4382 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4384 if (TREE_CODE (decl) == VAR_DECL
4385 /* These warnings are unreliable for aggregates
4386 because assigning the fields one by one can fail to convince
4387 flow.c that the entire aggregate was initialized.
4388 Unions are troublesome because members may be shorter. */
4389 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4390 && DECL_RTL (decl) != 0
4391 && GET_CODE (DECL_RTL (decl)) == REG
4392 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4393 warning_with_decl (decl,
4394 "`%s' might be used uninitialized in this function");
4395 if (TREE_CODE (decl) == VAR_DECL
4396 && DECL_RTL (decl) != 0
4397 && GET_CODE (DECL_RTL (decl)) == REG
4398 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4399 warning_with_decl (decl,
4400 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4402 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4403 uninitialized_vars_warning (sub);
4406 /* Do the appropriate part of uninitialized_vars_warning
4407 but for arguments instead of local variables. */
4409 void
4410 setjmp_args_warning ()
4412 register tree decl;
4413 for (decl = DECL_ARGUMENTS (current_function_decl);
4414 decl; decl = TREE_CHAIN (decl))
4415 if (DECL_RTL (decl) != 0
4416 && GET_CODE (DECL_RTL (decl)) == REG
4417 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4418 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4421 /* If this function calls setjmp, put all vars into the stack
4422 unless they were declared `register'. */
4424 void
4425 setjmp_protect (block)
4426 tree block;
4428 register tree decl, sub;
4429 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4430 if ((TREE_CODE (decl) == VAR_DECL
4431 || TREE_CODE (decl) == PARM_DECL)
4432 && DECL_RTL (decl) != 0
4433 && GET_CODE (DECL_RTL (decl)) == REG
4434 /* If this variable came from an inline function, it must be
4435 that its life doesn't overlap the setjmp. If there was a
4436 setjmp in the function, it would already be in memory. We
4437 must exclude such variables because their DECL_RTL might be
4438 set to strange things such as virtual_stack_vars_rtx. */
4439 && ! DECL_FROM_INLINE (decl)
4440 && (
4441 #ifdef NON_SAVING_SETJMP
4442 /* If longjmp doesn't restore the registers,
4443 don't put anything in them. */
4444 NON_SAVING_SETJMP
4446 #endif
4447 ! DECL_REGISTER (decl)))
4448 put_var_into_stack (decl);
4449 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4450 setjmp_protect (sub);
4453 /* Like the previous function, but for args instead of local variables. */
4455 void
4456 setjmp_protect_args ()
4458 register tree decl, sub;
4459 for (decl = DECL_ARGUMENTS (current_function_decl);
4460 decl; decl = TREE_CHAIN (decl))
4461 if ((TREE_CODE (decl) == VAR_DECL
4462 || TREE_CODE (decl) == PARM_DECL)
4463 && DECL_RTL (decl) != 0
4464 && GET_CODE (DECL_RTL (decl)) == REG
4465 && (
4466 /* If longjmp doesn't restore the registers,
4467 don't put anything in them. */
4468 #ifdef NON_SAVING_SETJMP
4469 NON_SAVING_SETJMP
4471 #endif
4472 ! DECL_REGISTER (decl)))
4473 put_var_into_stack (decl);
4476 /* Return the context-pointer register corresponding to DECL,
4477 or 0 if it does not need one. */
4480 lookup_static_chain (decl)
4481 tree decl;
4483 tree context = decl_function_context (decl);
4484 tree link;
4486 if (context == 0
4487 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4488 return 0;
4490 /* We treat inline_function_decl as an alias for the current function
4491 because that is the inline function whose vars, types, etc.
4492 are being merged into the current function.
4493 See expand_inline_function. */
4494 if (context == current_function_decl || context == inline_function_decl)
4495 return virtual_stack_vars_rtx;
4497 for (link = context_display; link; link = TREE_CHAIN (link))
4498 if (TREE_PURPOSE (link) == context)
4499 return RTL_EXPR_RTL (TREE_VALUE (link));
4501 abort ();
4504 /* Convert a stack slot address ADDR for variable VAR
4505 (from a containing function)
4506 into an address valid in this function (using a static chain). */
4509 fix_lexical_addr (addr, var)
4510 rtx addr;
4511 tree var;
4513 rtx basereg;
4514 int displacement;
4515 tree context = decl_function_context (var);
4516 struct function *fp;
4517 rtx base = 0;
4519 /* If this is the present function, we need not do anything. */
4520 if (context == current_function_decl || context == inline_function_decl)
4521 return addr;
4523 for (fp = outer_function_chain; fp; fp = fp->next)
4524 if (fp->decl == context)
4525 break;
4527 if (fp == 0)
4528 abort ();
4530 /* Decode given address as base reg plus displacement. */
4531 if (GET_CODE (addr) == REG)
4532 basereg = addr, displacement = 0;
4533 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4534 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4535 else
4536 abort ();
4538 /* We accept vars reached via the containing function's
4539 incoming arg pointer and via its stack variables pointer. */
4540 if (basereg == fp->internal_arg_pointer)
4542 /* If reached via arg pointer, get the arg pointer value
4543 out of that function's stack frame.
4545 There are two cases: If a separate ap is needed, allocate a
4546 slot in the outer function for it and dereference it that way.
4547 This is correct even if the real ap is actually a pseudo.
4548 Otherwise, just adjust the offset from the frame pointer to
4549 compensate. */
4551 #ifdef NEED_SEPARATE_AP
4552 rtx addr;
4554 if (fp->arg_pointer_save_area == 0)
4555 fp->arg_pointer_save_area
4556 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4558 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4559 addr = memory_address (Pmode, addr);
4561 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4562 #else
4563 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4564 base = lookup_static_chain (var);
4565 #endif
4568 else if (basereg == virtual_stack_vars_rtx)
4570 /* This is the same code as lookup_static_chain, duplicated here to
4571 avoid an extra call to decl_function_context. */
4572 tree link;
4574 for (link = context_display; link; link = TREE_CHAIN (link))
4575 if (TREE_PURPOSE (link) == context)
4577 base = RTL_EXPR_RTL (TREE_VALUE (link));
4578 break;
4582 if (base == 0)
4583 abort ();
4585 /* Use same offset, relative to appropriate static chain or argument
4586 pointer. */
4587 return plus_constant (base, displacement);
4590 /* Return the address of the trampoline for entering nested fn FUNCTION.
4591 If necessary, allocate a trampoline (in the stack frame)
4592 and emit rtl to initialize its contents (at entry to this function). */
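/* For illustration, a GNU C nested function whose address is taken;
   this is the situation that makes a trampoline necessary.  */
#if 0
int
outer (int x)
{
  int nested (int y) { return x + y; }	/* Uses OUTER's frame.  */
  int (*fp) (int) = nested;		/* Forces a trampoline.  */
  return fp (1);
}
#endif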
4595 trampoline_address (function)
4596 tree function;
4598 tree link;
4599 tree rtlexp;
4600 rtx tramp;
4601 struct function *fp;
4602 tree fn_context;
4604 /* Find an existing trampoline and return it. */
4605 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4606 if (TREE_PURPOSE (link) == function)
4607 return
4608 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4610 for (fp = outer_function_chain; fp; fp = fp->next)
4611 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4612 if (TREE_PURPOSE (link) == function)
4614 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4615 function);
4616 return round_trampoline_addr (tramp);
4619 /* None exists; we must make one. */
4621 /* Find the `struct function' for the function containing FUNCTION. */
4622 fp = 0;
4623 fn_context = decl_function_context (function);
4624 if (fn_context != current_function_decl)
4625 for (fp = outer_function_chain; fp; fp = fp->next)
4626 if (fp->decl == fn_context)
4627 break;
4629 /* Allocate run-time space for this trampoline
4630 (usually in the defining function's stack frame). */
4631 #ifdef ALLOCATE_TRAMPOLINE
4632 tramp = ALLOCATE_TRAMPOLINE (fp);
4633 #else
4634 /* If rounding needed, allocate extra space
4635 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4636 #ifdef TRAMPOLINE_ALIGNMENT
4637 #define TRAMPOLINE_REAL_SIZE \
4638 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4639 #else
4640 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4641 #endif
4642 if (fp != 0)
4643 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4644 else
4645 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4646 #endif
4648 /* Record the trampoline for reuse and note it for later initialization
4649 by expand_function_end. */
4650 if (fp != 0)
4652 push_obstacks (fp->function_maybepermanent_obstack,
4653 fp->function_maybepermanent_obstack);
4654 rtlexp = make_node (RTL_EXPR);
4655 RTL_EXPR_RTL (rtlexp) = tramp;
4656 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4657 pop_obstacks ();
4659 else
4661 /* Make the RTL_EXPR node temporary, not momentary, so that the
4662 trampoline_list doesn't become garbage. */
4663 int momentary = suspend_momentary ();
4664 rtlexp = make_node (RTL_EXPR);
4665 resume_momentary (momentary);
4667 RTL_EXPR_RTL (rtlexp) = tramp;
4668 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4671 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4672 return round_trampoline_addr (tramp);
4675 /* Given a trampoline address,
4676 round it to multiple of TRAMPOLINE_ALIGNMENT. */
4678 static rtx
4679 round_trampoline_addr (tramp)
4680 rtx tramp;
4682 #ifdef TRAMPOLINE_ALIGNMENT
4683 /* Round address up to desired boundary. */
4684 rtx temp = gen_reg_rtx (Pmode);
4685 temp = expand_binop (Pmode, add_optab, tramp,
4686 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4687 temp, 0, OPTAB_LIB_WIDEN);
4688 tramp = expand_binop (Pmode, and_optab, temp,
4689 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4690 temp, 0, OPTAB_LIB_WIDEN);
4691 #endif
4692 return tramp;
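/* Worked example: with TRAMPOLINE_ALIGNMENT of 32 bits (4 bytes),
   the add/and sequence above maps an address of 0x1001 to
   (0x1001 + 3) & -4 == 0x1004.  */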
4695 /* The functions identify_blocks and reorder_blocks provide a way to
4696 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4697 duplicate portions of the RTL code. Call identify_blocks before
4698 changing the RTL, and call reorder_blocks after. */
4700 /* Put all this function's BLOCK nodes including those that are chained
4701 onto the first block into a vector, and return it.
4702 Also store in each NOTE for the beginning or end of a block
4703 the index of that block in the vector.
4704 The arguments are BLOCK, the chain of top-level blocks of the function,
4705 and INSNS, the insn chain of the function. */
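/* For illustration, with a hypothetical function

       void f () { { int a; } { int b; { int c; } } }

   the top-level BLOCK occupies slot 0 of the vector and gets no note;
   assuming the blocks are chained in source order, the blocks for
   `a', `b' and `c' receive NOTE_BLOCK_NUMBERs 1, 2 and 3, matching
   their slots in the vector.  */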
4707 tree *
4708 identify_blocks (block, insns)
4709 tree block;
4710 rtx insns;
4712 int n_blocks;
4713 tree *block_vector;
4714 int *block_stack;
4715 int depth = 0;
4716 int next_block_number = 1;
4717 int current_block_number = 1;
4718 rtx insn;
4720 if (block == 0)
4721 return 0;
4723 n_blocks = all_blocks (block, 0);
4724 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4725 block_stack = (int *) alloca (n_blocks * sizeof (int));
4727 all_blocks (block, block_vector);
4729 for (insn = insns; insn; insn = NEXT_INSN (insn))
4730 if (GET_CODE (insn) == NOTE)
4732 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4734 block_stack[depth++] = current_block_number;
4735 current_block_number = next_block_number;
4736 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4738 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4740 current_block_number = block_stack[--depth];
4741 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4745 if (n_blocks != next_block_number)
4746 abort ();
4748 return block_vector;
4751 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4752 and a revised instruction chain, rebuild the tree structure
4753 of BLOCK nodes to correspond to the new order of RTL.
4754 The new block tree is inserted below TOP_BLOCK.
4755 Returns the current top-level block. */
4757 tree
4758 reorder_blocks (block_vector, block, insns)
4759 tree *block_vector;
4760 tree block;
4761 rtx insns;
4763 tree current_block = block;
4764 rtx insn;
4766 if (block_vector == 0)
4767 return block;
4769 /* Prune the old trees away, so that they don't get in the way. */
4770 BLOCK_SUBBLOCKS (current_block) = 0;
4771 BLOCK_CHAIN (current_block) = 0;
4773 for (insn = insns; insn; insn = NEXT_INSN (insn))
4774 if (GET_CODE (insn) == NOTE)
4776 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4778 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4779 /* If we have seen this block before, copy it. */
4780 if (TREE_ASM_WRITTEN (block))
4781 block = copy_node (block);
4782 BLOCK_SUBBLOCKS (block) = 0;
4783 TREE_ASM_WRITTEN (block) = 1;
4784 BLOCK_SUPERCONTEXT (block) = current_block;
4785 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4786 BLOCK_SUBBLOCKS (current_block) = block;
4787 current_block = block;
4788 NOTE_SOURCE_FILE (insn) = 0;
4790 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4792 BLOCK_SUBBLOCKS (current_block)
4793 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4794 current_block = BLOCK_SUPERCONTEXT (current_block);
4795 NOTE_SOURCE_FILE (insn) = 0;
4799 BLOCK_SUBBLOCKS (current_block)
4800 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4801 return current_block;
4804 /* Reverse the order of elements in the chain T of blocks,
4805 and return the new head of the chain (old last element). */
4807 static tree
4808 blocks_nreverse (t)
4809 tree t;
4811 register tree prev = 0, decl, next;
4812 for (decl = t; decl; decl = next)
4814 next = BLOCK_CHAIN (decl);
4815 BLOCK_CHAIN (decl) = prev;
4816 prev = decl;
4818 return prev;
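/* For example, a chain B1 -> B2 -> B3 linked through BLOCK_CHAIN
   becomes B3 -> B2 -> B1, and B3 is returned as the new head.  */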
4821 /* Count the subblocks of the list starting with BLOCK, and list them
4822 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4823 blocks. */
4825 static int
4826 all_blocks (block, vector)
4827 tree block;
4828 tree *vector;
4830 int n_blocks = 0;
4832 while (block)
4834 TREE_ASM_WRITTEN (block) = 0;
4836 /* Record this block. */
4837 if (vector)
4838 vector[n_blocks] = block;
4840 ++n_blocks;
4842 /* Record the subblocks, and their subblocks... */
4843 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4844 vector ? vector + n_blocks : 0);
4845 block = BLOCK_CHAIN (block);
4848 return n_blocks;
4851 /* Build bytecode call descriptor for function SUBR. */
4854 bc_build_calldesc (subr)
4855 tree subr;
4857 tree calldesc = 0, arg;
4858 int nargs = 0;
4860 /* Build the argument description vector in reverse order. */
4861 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4862 nargs = 0;
4864 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4866 ++nargs;
4868 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4869 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4872 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4874 /* Prepend the function's return type. */
4875 calldesc = tree_cons ((tree) 0,
4876 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4877 calldesc);
4879 calldesc = tree_cons ((tree) 0,
4880 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4881 calldesc);
4883 /* Prepend the arg count. */
4884 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4886 /* Output the call description vector and get its address. */
4887 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4888 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4889 build_index_type (build_int_2 (nargs * 2, 0)));
4891 return output_constant_def (calldesc);
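/* For illustration, with a hypothetical signature

       double f (int a, char b)

   the descriptor built above is laid out, in order, as

       nargs (2),
       typecode and size of the double return type,
       typecode and size of int (first argument),
       typecode and size of char (second argument),

   using bc_runtime_type_code and size_in_bytes for each entry.  */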
4895 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4896 and initialize static variables for generating RTL for the statements
4897 of the function. */
4899 void
4900 init_function_start (subr, filename, line)
4901 tree subr;
4902 char *filename;
4903 int line;
4905 if (output_bytecode)
4907 this_function_decl = subr;
4908 this_function_calldesc = bc_build_calldesc (subr);
4909 local_vars_size = 0;
4910 stack_depth = 0;
4911 max_stack_depth = 0;
4912 stmt_expr_depth = 0;
4913 return;
4916 init_stmt_for_function ();
4918 cse_not_expected = ! optimize;
4920 /* Caller save not needed yet. */
4921 caller_save_needed = 0;
4923 /* No stack slots have been made yet. */
4924 stack_slot_list = 0;
4926 /* There is no stack slot for handling nonlocal gotos. */
4927 nonlocal_goto_handler_slot = 0;
4928 nonlocal_goto_stack_level = 0;
4930 /* No labels have been declared for nonlocal use. */
4931 nonlocal_labels = 0;
4933 /* No function calls so far in this function. */
4934 function_call_count = 0;
4936 /* No parm regs have been allocated.
4937 (This is important for output_inline_function.) */
4938 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4940 /* Initialize the RTL mechanism. */
4941 init_emit ();
4943 /* Initialize the queue of pending postincrement and postdecrements,
4944 and some other info in expr.c. */
4945 init_expr ();
4947 /* We haven't done register allocation yet. */
4948 reg_renumber = 0;
4950 init_const_rtx_hash_table ();
4952 current_function_name = (*decl_printable_name) (subr, 2);
4954 /* Nonzero if this is a nested function that uses a static chain. */
4956 current_function_needs_context
4957 = (decl_function_context (current_function_decl) != 0
4958 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4960 /* Set if a call to setjmp is seen. */
4961 current_function_calls_setjmp = 0;
4963 /* Set if a call to longjmp is seen. */
4964 current_function_calls_longjmp = 0;
4966 current_function_calls_alloca = 0;
4967 current_function_has_nonlocal_label = 0;
4968 current_function_has_nonlocal_goto = 0;
4969 current_function_contains_functions = 0;
4971 current_function_returns_pcc_struct = 0;
4972 current_function_returns_struct = 0;
4973 current_function_epilogue_delay_list = 0;
4974 current_function_uses_const_pool = 0;
4975 current_function_uses_pic_offset_table = 0;
4977 /* We have not yet needed to make a label to jump to for tail-recursion. */
4978 tail_recursion_label = 0;
4980 /* We haven't had a need to make a save area for ap yet. */
4982 arg_pointer_save_area = 0;
4984 /* No stack slots allocated yet. */
4985 frame_offset = 0;
4987 /* No SAVE_EXPRs in this function yet. */
4988 save_expr_regs = 0;
4990 /* No RTL_EXPRs in this function yet. */
4991 rtl_expr_chain = 0;
4993 /* Set up to allocate temporaries. */
4994 init_temp_slots ();
4996 /* Within the function body, compute a type's size as soon as it is laid out. */
4997 immediate_size_expand++;
4999 /* We haven't made any trampolines for this function yet. */
5000 trampoline_list = 0;
5002 init_pending_stack_adjust ();
5003 inhibit_defer_pop = 0;
5005 current_function_outgoing_args_size = 0;
5007 /* Prevent ever trying to delete the first instruction of a function.
5008 Also tell final how to output a linenum before the function prologue. */
5009 emit_line_note (filename, line);
5011 /* Make sure first insn is a note even if we don't want linenums.
5012 This makes sure the first insn will never be deleted.
5013 Also, final expects a note to appear there. */
5014 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5016 /* Set flags used by final.c. */
5017 if (aggregate_value_p (DECL_RESULT (subr)))
5019 #ifdef PCC_STATIC_STRUCT_RETURN
5020 current_function_returns_pcc_struct = 1;
5021 #endif
5022 current_function_returns_struct = 1;
5025 /* Warn if this value is an aggregate type,
5026 regardless of which calling convention we are using for it. */
5027 if (warn_aggregate_return
5028 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5029 warning ("function returns an aggregate");
5031 current_function_returns_pointer
5032 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5034 /* Indicate that we need to distinguish between the return value of the
5035 present function and the return value of a function being called. */
5036 rtx_equal_function_value_matters = 1;
5038 /* Indicate that we have not instantiated virtual registers yet. */
5039 virtuals_instantiated = 0;
5041 /* Indicate we have no need of a frame pointer yet. */
5042 frame_pointer_needed = 0;
5044 /* By default, assume neither varargs nor stdarg. */
5045 current_function_varargs = 0;
5046 current_function_stdarg = 0;
5049 /* Indicate that the current function uses extra args
5050 not explicitly mentioned in the argument list in any fashion. */
5052 void
5053 mark_varargs ()
5055 current_function_varargs = 1;
5058 /* Expand a call to __main at the beginning of a possible main function. */
5060 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5061 #undef HAS_INIT_SECTION
5062 #define HAS_INIT_SECTION
5063 #endif
5065 void
5066 expand_main_function ()
5068 if (!output_bytecode)
5070 /* The zero below avoids a possible parse error */
5072 #if !defined (HAS_INIT_SECTION)
5073 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
5074 VOIDmode, 0);
5075 #endif /* not HAS_INIT_SECTION */
5079 extern struct obstack permanent_obstack;
5081 /* Expand start of bytecode function. See comment at
5082 expand_function_start below for details. */
5084 void
5085 bc_expand_function_start (subr, parms_have_cleanups)
5086 tree subr;
5087 int parms_have_cleanups;
5089 char label[20], *name;
5090 static int nlab;
5091 tree thisarg;
5092 int argsz;
5094 if (TREE_PUBLIC (subr))
5095 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
5097 #ifdef DEBUG_PRINT_CODE
5098 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
5099 #endif
5101 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5103 if (DECL_RTL (thisarg))
5104 abort (); /* DECL_RTL should still be NULL at this point. */
5105 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5107 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5108 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5110 else
5112 /* Variable-sized objects are pointers to their storage. */
5113 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5114 argsz += POINTER_SIZE;
5118 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5120 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5122 ++nlab;
5123 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5124 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5125 this_function_bytecode =
5126 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5130 /* Expand end of bytecode function. See the comment at
5131 expand_function_end (), below, for details. */
5133 void
5134 bc_expand_function_end ()
5136 char *ptrconsts;
5138 expand_null_return ();
5140 /* Emit any fixup code. This must be done before the call
5141 to BC_END_FUNCTION (), since that will cause the bytecode
5142 segment to be finished off and closed. */
5144 expand_fixups (NULL_RTX);
5146 ptrconsts = bc_end_function ();
5148 bc_align_const (2 /* INT_ALIGN */);
5150 /* If this changes also make sure to change bc-interp.h! */
5152 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5153 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5154 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5155 bc_emit_const_labelref (this_function_bytecode, 0);
5156 bc_emit_const_labelref (ptrconsts, 0);
5157 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5161 /* Start the RTL for a new function, and set variables used for
5162 emitting RTL.
5163 SUBR is the FUNCTION_DECL node.
5164 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5165 the function's parameters, which must be run at any return statement. */
5167 void
5168 expand_function_start (subr, parms_have_cleanups)
5169 tree subr;
5170 int parms_have_cleanups;
5172 register int i;
5173 tree tem;
5174 rtx last_ptr;
5176 if (output_bytecode)
5178 bc_expand_function_start (subr, parms_have_cleanups);
5179 return;
5182 /* Make sure volatile mem refs aren't considered
5183 valid operands of arithmetic insns. */
5184 init_recog_no_volatile ();
5186 /* If function gets a static chain arg, store it in the stack frame.
5187 Do this first, so it gets the first stack slot offset. */
5188 if (current_function_needs_context)
5190 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5192 #ifdef SMALL_REGISTER_CLASSES
5193 /* Delay copying static chain if it is not a register to avoid
5194 conflicts with regs used for parameters. */
5195 if (! SMALL_REGISTER_CLASSES
5196 || GET_CODE (static_chain_incoming_rtx) == REG)
5197 #endif
5198 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5201 /* If the parameters of this function need cleaning up, get a label
5202 for the beginning of the code which executes those cleanups. This must
5203 be done before doing anything with return_label. */
5204 if (parms_have_cleanups)
5205 cleanup_label = gen_label_rtx ();
5206 else
5207 cleanup_label = 0;
5209 /* Make the label for return statements to jump to, if this machine
5210 does not have a one-instruction return and uses an epilogue,
5211 or if it returns a structure, or if it has parm cleanups. */
5212 #ifdef HAVE_return
5213 if (cleanup_label == 0 && HAVE_return
5214 && ! current_function_returns_pcc_struct
5215 && ! (current_function_returns_struct && ! optimize))
5216 return_label = 0;
5217 else
5218 return_label = gen_label_rtx ();
5219 #else
5220 return_label = gen_label_rtx ();
5221 #endif
5223 /* Initialize rtx used to return the value. */
5224 /* Do this before assign_parms so that we copy the struct value address
5225 before any library calls that assign parms might generate. */
5227 /* Decide whether to return the value in memory or in a register. */
5228 if (aggregate_value_p (DECL_RESULT (subr)))
5230 /* Returning something that won't go in a register. */
5231 register rtx value_address = 0;
5233 #ifdef PCC_STATIC_STRUCT_RETURN
5234 if (current_function_returns_pcc_struct)
5236 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5237 value_address = assemble_static_space (size);
5239 else
5240 #endif
5242 /* Expect to be passed the address of a place to store the value.
5243 If it is passed as an argument, assign_parms will take care of
5244 it. */
5245 if (struct_value_incoming_rtx)
5247 value_address = gen_reg_rtx (Pmode);
5248 emit_move_insn (value_address, struct_value_incoming_rtx);
5251 if (value_address)
5253 DECL_RTL (DECL_RESULT (subr))
5254 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
5255 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5256 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5259 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5260 /* If return mode is void, this decl rtl should not be used. */
5261 DECL_RTL (DECL_RESULT (subr)) = 0;
5262 else if (parms_have_cleanups)
5264 /* If function will end with cleanup code for parms,
5265 compute the return values into a pseudo reg,
5266 which we will copy into the true return register
5267 after the cleanups are done. */
5269 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5271 #ifdef PROMOTE_FUNCTION_RETURN
5272 tree type = TREE_TYPE (DECL_RESULT (subr));
5273 int unsignedp = TREE_UNSIGNED (type);
5275 mode = promote_mode (type, mode, &unsignedp, 1);
5276 #endif
5278 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5280 else
5281 /* Scalar, returned in a register. */
5283 #ifdef FUNCTION_OUTGOING_VALUE
5284 DECL_RTL (DECL_RESULT (subr))
5285 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5286 #else
5287 DECL_RTL (DECL_RESULT (subr))
5288 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5289 #endif
5291 /* Mark this reg as the function's return value. */
5292 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5294 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5295 /* Needed because we may need to move this to memory
5296 in case it's a named return value whose address is taken. */
5297 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5301 /* Initialize rtx for parameters and local variables.
5302 In some cases this requires emitting insns. */
5304 assign_parms (subr, 0);
5306 #ifdef SMALL_REGISTER_CLASSES
5307 /* Copy the static chain now if it wasn't a register. The delay is to
5308 avoid conflicts with the parameter passing registers. */
5310 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5311 if (GET_CODE (static_chain_incoming_rtx) != REG)
5312 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5313 #endif
5315 /* The following was moved from init_function_start.
5316 The move is supposed to make sdb output more accurate. */
5317 /* Indicate the beginning of the function body,
5318 as opposed to parm setup. */
5319 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5321 /* If doing stupid allocation, mark parms as born here. */
5323 if (GET_CODE (get_last_insn ()) != NOTE)
5324 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5325 parm_birth_insn = get_last_insn ();
5327 if (obey_regdecls)
5329 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5330 use_variable (regno_reg_rtx[i]);
5332 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5333 use_variable (current_function_internal_arg_pointer);
5336 context_display = 0;
5337 if (current_function_needs_context)
5339 /* Fetch static chain values for containing functions. */
5340 tem = decl_function_context (current_function_decl);
5341 /* If not doing stupid register allocation, copy the static chain
5342 pointer into a pseudo. If we have small register classes, copy
5343 the value from memory if static_chain_incoming_rtx is a REG. If
5344 we do stupid register allocation, we use the stack address
5345 generated above. */
5346 if (tem && ! obey_regdecls)
5348 #ifdef SMALL_REGISTER_CLASSES
5349 /* If the static chain originally came in a register, put it back
5350 there, then move it out in the next insn. The reason for
5351 this peculiar code is to satisfy function integration. */
5352 if (SMALL_REGISTER_CLASSES
5353 && GET_CODE (static_chain_incoming_rtx) == REG)
5354 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5355 #endif
5357 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5360 while (tem)
5362 tree rtlexp = make_node (RTL_EXPR);
5364 RTL_EXPR_RTL (rtlexp) = last_ptr;
5365 context_display = tree_cons (tem, rtlexp, context_display);
5366 tem = decl_function_context (tem);
5367 if (tem == 0)
5368 break;
5369 /* Chain thru stack frames, assuming pointer to next lexical frame
5370 is found at the place we always store it. */
5371 #ifdef FRAME_GROWS_DOWNWARD
5372 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5373 #endif
5374 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
5375 memory_address (Pmode, last_ptr)));
5377 /* If we are not optimizing, ensure that we know that this
5378 piece of context is live over the entire function. */
5379 if (! optimize)
5380 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
5381 save_expr_regs);
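
  /* So for a function nested two levels deep, the first display entry
     holds the incoming static chain itself, and the second is loaded
     from the fixed slot in the enclosing frame where the next chain
     pointer is always stored.  */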

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}

/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  if (output_bytecode)
    {
      bc_expand_function_end ();
      return;
    }

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx blktramp;
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();
        }
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
                       GEN_INT (TRAMPOLINE_SIZE),
                       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
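
  /* Each trampoline is a small block of code built at run time; when
     called, it loads CONTEXT into the static chain register and jumps
     to FUNCTION.  The target's INITIALIZE_TRAMPOLINE macro emits
     whatever insns are needed to store those two values into the block.  */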

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
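
  /* The save is threaded in back at parm_birth_insn and the restore is
     emitted here, so the stack pointer is exact on exit even though
     alloca moved it during the body.  */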

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
         the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
        PUT_MODE (real_decl_result,
                  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }
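
  /* The USE insn keeps the copy into the hard return register from
     being deleted as dead code by later passes.  */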

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
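
/* The vector is zero-terminated (insn UIDs are nonzero), so callers
   such as contains, below, can scan it without a separate length.  */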

/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}
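
/* An INSN whose pattern is a SEQUENCE is a delay-slot group made by
   reorg, so each insn inside the group is checked individually.  */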

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq, insn;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
         prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
         if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
          && GET_CODE (basic_block_head[0]) != CODE_LABEL)
        basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;
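
  /* A zero map here (and for the epilogue below) tells
     reposition_prologue_and_epilogue_notes that there is nothing to
     move.  */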

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
        {
          rtx tail, seq, tem;
          rtx first_use = 0;
          rtx last_use = 0;

          /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
             epilogue insns, the USE insns at the end of a function,
             the jump insn that returns, and then a BARRIER.  */

          /* Move the USE insns at the end of a function onto a list.  */
          while (prev
                 && GET_CODE (prev) == INSN
                 && GET_CODE (PATTERN (prev)) == USE)
            {
              tem = prev;
              prev = prev_nonnote_insn (prev);

              NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
              PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
              if (first_use)
                {
                  NEXT_INSN (tem) = first_use;
                  PREV_INSN (first_use) = tem;
                }
              first_use = tem;
              if (!last_use)
                last_use = tem;
            }

          emit_barrier_after (insn);

          seq = gen_epilogue ();
          tail = emit_jump_insn_after (seq, insn);

          /* Insert the USE insns immediately before the return insn, which
             must be the first instruction before the final barrier.  */
          if (first_use)
            {
              tem = prev_nonnote_insn (get_last_insn ());
              NEXT_INSN (PREV_INSN (tem)) = first_use;
              PREV_INSN (first_use) = PREV_INSN (tem);
              PREV_INSN (tem) = last_use;
              NEXT_INSN (last_use) = tem;
            }

          emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

          /* Include the new epilogue insns in the last block.  Ignore
             them if they form a basic block unto themselves.  */
          if (basic_block_end && n_basic_blocks
              && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
            basic_block_end[n_basic_blocks - 1] = tail;

          /* Retain a map of the epilogue insns.  */
          epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
          return;
        }
    }
#endif
  epilogue = 0;
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; note = NEXT_INSN (note);)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, insn);
                }
            }
        }

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; note = PREV_INSN (note);)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, PREV_INSN (insn));
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}