/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
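
/* Illustrative example (an editorial sketch, not part of the original
   sources): given

       int f () { int i = 0; int *p = &i; return *p; }

   `i' is first expanded to a pseudo-register; when the front end sees
   `&i' it calls put_var_into_stack, which rewrites DECL_RTL (i) from
   (reg ...) to (mem ...) and then fixes up every insn already emitted
   that mentioned the pseudo.  */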
#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
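
/* Worked example (editorial, assuming ALIGN is a power of two):
   with ALIGN == 8, FLOOR_ROUND (-5, 8) is (-5 & ~7) == -8 and
   CEIL_ROUND (5, 8) is ((5 + 7) & ~7) == 8.  Masking gives the
   correct floor even for negative VALUEs, where C integer division
   would truncate toward zero instead.  */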

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
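
/* Illustrative example (editorial, not in the original sources): in

       x = f ().field;

   the aggregate returned by f lives in a stack temporary allocated at
   the current temp_slot_level; once the enclosing statement has been
   expanded, free_temp_slots releases the slot for reuse by later
   statements.  */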

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address  PROTO((rtx));
static void put_reg_into_stack  PROTO((struct function *, rtx, tree,
                                       enum machine_mode, enum machine_mode,
                                       int));
static void fixup_var_refs      PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement       PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1    PROTO((rtx, enum machine_mode, rtx *, rtx,
                                       struct fixup_replacement **));
static rtx fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx fixup_stack_1        PROTO((rtx, rtx));
static void optimize_bit_field  PROTO((rtx, rtx, rtx *));
static void instantiate_decls   PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl    PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers     PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below           PROTO((struct args_size *, enum machine_mode,
                                       tree));
static tree round_down          PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse     PROTO((tree));
static int all_blocks           PROTO((tree, tree *));
static int *record_insns        PROTO((rtx));
static int contains             PROTO((rtx, int *));

/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
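
/* Usage sketch (editorial, not from the original sources): a front end
   compiling a nested function pairs these as

       push_function_context ();
       ...expand the nested function's body...
       pop_function_context ();

   so that all of the `current_function_*' state of the enclosing
   function is saved across the nested expansion and restored
   afterwards.  */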

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
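
/* Example (an editorial sketch, not part of the original sources):
   on a FRAME_GROWS_DOWNWARD target with frame_offset == -4, the call

       assign_stack_local (SImode, 4, 0)

   rounds frame_offset down to SImode's alignment (still -4), moves it
   to -8, and returns (mem:SI (plus (virtual_stack_vars) (const_int -8)));
   the real frame-pointer offset is substituted later, when virtual
   registers are instantiated.  */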

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
                                                rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
                                         stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  return p->slot;
}
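
/* Usage sketch (editorial, not from the original sources):

       rtx slot = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
       emit_move_insn (slot, value);
       ...
       free_temp_slots ();

   With KEEP == 0 the slot becomes reusable at the free_temp_slots call
   ending the statement; KEEP == 1 retains it for the enclosing block,
   and KEEP == 2 ties its lifetime to the innermost cleanup point.  */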

/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
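
/* Worked example (editorial): two free BLKmode slots with
   base_offset 0 / full_size 16 and base_offset 16 / full_size 8 satisfy
   p->base_offset + p->full_size == q->base_offset, so they are merged
   into one 24-byte slot that a later assign_stack_temp can reuse.  */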

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
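
/* Sketch of the level game (editorial, not from the original sources):
   if a slot was allocated at temp_slot_level == 2 and the statement
   expression's value may live in it, setting p->level to 1 makes the
   slot survive the free_temp_slots and pop_temp_slots calls that end
   level 2, so the enclosing statement can still read the result.  */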

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
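
/* Usage sketch (editorial, not from the original sources): callers
   bracket the expansion of a construct that may allocate temporaries:

       push_temp_slots ();
       ...expand, possibly calling assign_stack_temp...
       preserve_temp_slots (result);
       pop_temp_slots ();

   preserve_temp_slots keeps the slot holding RESULT alive past the
   pop; everything else allocated at the inner level is freed.  */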

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}

static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}

/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
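
/* Illustrative note (editorial, not from the original sources): the
   list exists because an insn whose pattern uses a MATCH_DUP, such as
   one matching (set (mem VAR) (mem VAR)), must see both occurrences
   rewritten to the same replacement rtx; looking the rtx up here
   before allocating a fresh entry guarantees that.  */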

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
              /* If the insn that copies the results of a CALL_INSN
                 into a pseudo now references VAR, we have to use an
                 intermediate pseudo since we want the life of the
                 return value register to be only a single insn.

                 If we don't use an intermediate pseudo, such things as
                 address computations to make the address of VAR valid
                 if it is not can be placed between the CALL_INSN and INSN.

                 To make sure this doesn't happen, we record the destination
                 of the CALL_INSN and see if the next insn uses both that
                 and VAR.  */

              if (call_dest != 0 && GET_CODE (insn) == INSN
                  && reg_mentioned_p (var, PATTERN (insn))
                  && reg_mentioned_p (call_dest, PATTERN (insn)))
                {
                  rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                  emit_insn_before (gen_move_insn (temp, call_dest), insn);

                  PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                call_dest, temp);
                }

              if (GET_CODE (insn) == CALL_INSN
                  && GET_CODE (PATTERN (insn)) == SET)
                call_dest = SET_DEST (PATTERN (insn));
              else if (GET_CODE (insn) == CALL_INSN
                       && GET_CODE (PATTERN (insn)) == PARALLEL
                       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
              else
                call_dest = 0;
#endif

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
1661 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1662 See if the rtx expression at *LOC in INSN needs to be changed.
1664 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1665 contain a list of original rtx's and replacements. If we find that we need
1666 to modify this insn by replacing a memory reference with a pseudo or by
1667 making a new MEM to implement a SUBREG, we consult that list to see if
1668 we have already chosen a replacement. If none has already been allocated,
1669 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1670 or the SUBREG, as appropriate, to the pseudo. */
1672 static void
1673 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1674 register rtx var;
1675 enum machine_mode promoted_mode;
1676 register rtx *loc;
1677 rtx insn;
1678 struct fixup_replacement **replacements;
1680 register int i;
1681 register rtx x = *loc;
1682 RTX_CODE code = GET_CODE (x);
1683 register char *fmt;
1684 register rtx tem, tem1;
1685 struct fixup_replacement *replacement;
1687 switch (code)
1689 case MEM:
1690 if (var == x)
1692 /* If we already have a replacement, use it. Otherwise,
1693 try to fix up this address in case it is invalid. */
1695 replacement = find_fixup_replacement (replacements, var);
1696 if (replacement->new)
1698 *loc = replacement->new;
1699 return;
1702 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1704 /* Unless we are forcing memory to register or we changed the mode,
1705 we can leave things the way they are if the insn is valid. */
1707 INSN_CODE (insn) = -1;
1708 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1709 && recog_memoized (insn) >= 0)
1710 return;
1712 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1713 return;
1716 /* If X contains VAR, we need to unshare it here so that we update
1717 each occurrence separately. But all identical MEMs in one insn
1718 must be replaced with the same rtx because of the possibility of
1719 MATCH_DUPs. */
1721 if (reg_mentioned_p (var, x))
1723 replacement = find_fixup_replacement (replacements, x);
1724 if (replacement->new == 0)
1725 replacement->new = copy_most_rtx (x, var);
1727 *loc = x = replacement->new;
1729 break;
1731 case REG:
1732 case CC0:
1733 case PC:
1734 case CONST_INT:
1735 case CONST:
1736 case SYMBOL_REF:
1737 case LABEL_REF:
1738 case CONST_DOUBLE:
1739 return;
1741 case SIGN_EXTRACT:
1742 case ZERO_EXTRACT:
1743 /* Note that in some cases those types of expressions are altered
1744 by optimize_bit_field, and do not survive to get here. */
1745 if (XEXP (x, 0) == var
1746 || (GET_CODE (XEXP (x, 0)) == SUBREG
1747 && SUBREG_REG (XEXP (x, 0)) == var))
1749 /* Get TEM as a valid MEM in the mode presently in the insn.
1751 We don't worry about the possibility of MATCH_DUP here; it
1752 is highly unlikely and would be tricky to handle. */
1754 tem = XEXP (x, 0);
1755 if (GET_CODE (tem) == SUBREG)
1757 if (GET_MODE_BITSIZE (GET_MODE (tem))
1758 > GET_MODE_BITSIZE (GET_MODE (var)))
1760 replacement = find_fixup_replacement (replacements, var);
1761 if (replacement->new == 0)
1762 replacement->new = gen_reg_rtx (GET_MODE (var));
1763 SUBREG_REG (tem) = replacement->new;
1766 tem = fixup_memory_subreg (tem, insn, 0);
1768 else
1769 tem = fixup_stack_1 (tem, insn);
1771 /* Unless we want to load from memory, get TEM into the proper mode
1772 for an extract from memory. This can only be done if the
1773 extract is at a constant position and length. */
1775 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1776 && GET_CODE (XEXP (x, 2)) == CONST_INT
1777 && ! mode_dependent_address_p (XEXP (tem, 0))
1778 && ! MEM_VOLATILE_P (tem))
1780 enum machine_mode wanted_mode = VOIDmode;
1781 enum machine_mode is_mode = GET_MODE (tem);
1782 int width = INTVAL (XEXP (x, 1));
1783 int pos = INTVAL (XEXP (x, 2));
1785 #ifdef HAVE_extzv
1786 if (GET_CODE (x) == ZERO_EXTRACT)
1787 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1788 #endif
1789 #ifdef HAVE_extv
1790 if (GET_CODE (x) == SIGN_EXTRACT)
1791 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1792 #endif
1793 /* If we have a narrower mode, we can do something. */
1794 if (wanted_mode != VOIDmode
1795 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1797 int offset = pos / BITS_PER_UNIT;
1798 rtx old_pos = XEXP (x, 2);
1799 rtx newmem;
1801 /* If the bytes and bits are counted differently, we
1802 must adjust the offset. */
1803 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1804 offset = (GET_MODE_SIZE (is_mode)
1805 - GET_MODE_SIZE (wanted_mode) - offset);
1807 pos %= GET_MODE_BITSIZE (wanted_mode);
1809 newmem = gen_rtx (MEM, wanted_mode,
1810 plus_constant (XEXP (tem, 0), offset));
1811 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1812 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1813 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1815 /* Make the change and see if the insn remains valid. */
1816 INSN_CODE (insn) = -1;
1817 XEXP (x, 0) = newmem;
1818 XEXP (x, 2) = GEN_INT (pos);
1820 if (recog_memoized (insn) >= 0)
1821 return;
1823 /* Otherwise, restore old position. XEXP (x, 0) will be
1824 restored later. */
1825 XEXP (x, 2) = old_pos;
1829 /* If we get here, the bitfield extract insn can't accept a memory
1830 reference. Copy the input into a register. */
1832 tem1 = gen_reg_rtx (GET_MODE (tem));
1833 emit_insn_before (gen_move_insn (tem1, tem), insn);
1834 XEXP (x, 0) = tem1;
1835 return;
1837 break;
1839 case SUBREG:
1840 if (SUBREG_REG (x) == var)
1842 /* If this is a special SUBREG made because VAR was promoted
1843 to a wider mode, replace it with VAR and call ourself
1844 recursively, this time saying that the object previously
1845 had its current mode (by virtue of the SUBREG). */
1847 if (SUBREG_PROMOTED_VAR_P (x))
1849 *loc = var;
1850 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1851 return;
1854 /* If this SUBREG makes VAR wider, it has become a paradoxical
1855 SUBREG with VAR in memory, but these aren't allowed at this
1856 stage of the compilation. So load VAR into a pseudo and take
1857 a SUBREG of that pseudo. */
1858 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1860 replacement = find_fixup_replacement (replacements, var);
1861 if (replacement->new == 0)
1862 replacement->new = gen_reg_rtx (GET_MODE (var));
1863 SUBREG_REG (x) = replacement->new;
1864 return;
1867 /* See if we have already found a replacement for this SUBREG.
1868 If so, use it. Otherwise, make a MEM and see if the insn
1869 is recognized. If not, or if we should force MEM into a register,
1870 make a pseudo for this SUBREG. */
1871 replacement = find_fixup_replacement (replacements, x);
1872 if (replacement->new)
1874 *loc = replacement->new;
1875 return;
1878 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1880 INSN_CODE (insn) = -1;
1881 if (! flag_force_mem && recog_memoized (insn) >= 0)
1882 return;
1884 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1885 return;
1887 break;
1889 case SET:
1890 /* First do special simplification of bit-field references. */
1891 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1892 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1893 optimize_bit_field (x, insn, NULL_PTR);
1894 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1895 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1896 optimize_bit_field (x, insn, NULL_PTR);
1898 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1899 into a register and then store it back out. */
1900 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1901 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1902 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1903 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1904 > GET_MODE_SIZE (GET_MODE (var))))
1906 replacement = find_fixup_replacement (replacements, var);
1907 if (replacement->new == 0)
1908 replacement->new = gen_reg_rtx (GET_MODE (var));
1910 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1911 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1914 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1915 insn into a pseudo and store the low part of the pseudo into VAR. */
1916 if (GET_CODE (SET_DEST (x)) == SUBREG
1917 && SUBREG_REG (SET_DEST (x)) == var
1918 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1919 > GET_MODE_SIZE (GET_MODE (var))))
1921 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1922 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1923 tem)),
1924 insn);
1925 break;
1929 rtx dest = SET_DEST (x);
1930 rtx src = SET_SRC (x);
1931 rtx outerdest = dest;
1933 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1934 || GET_CODE (dest) == SIGN_EXTRACT
1935 || GET_CODE (dest) == ZERO_EXTRACT)
1936 dest = XEXP (dest, 0);
1938 if (GET_CODE (src) == SUBREG)
1939 src = XEXP (src, 0);
1941 /* If VAR does not appear at the top level of the SET,
1942 just scan the lower levels of the tree. */
1944 if (src != var && dest != var)
1945 break;
1947 /* We will need to rerecognize this insn. */
1948 INSN_CODE (insn) = -1;
1950 #ifdef HAVE_insv
1951 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1953 /* Since this case will return, ensure we fixup all the
1954 operands here. */
1955 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1956 insn, replacements);
1957 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1958 insn, replacements);
1959 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1960 insn, replacements);
1962 tem = XEXP (outerdest, 0);
1964 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1965 that may appear inside a ZERO_EXTRACT.
1966 This was legitimate when the MEM was a REG. */
1967 if (GET_CODE (tem) == SUBREG
1968 && SUBREG_REG (tem) == var)
1969 tem = fixup_memory_subreg (tem, insn, 0);
1970 else
1971 tem = fixup_stack_1 (tem, insn);
1973 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1974 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1975 && ! mode_dependent_address_p (XEXP (tem, 0))
1976 && ! MEM_VOLATILE_P (tem))
1978 enum machine_mode wanted_mode
1979 = insn_operand_mode[(int) CODE_FOR_insv][0];
1980 enum machine_mode is_mode = GET_MODE (tem);
1981 int width = INTVAL (XEXP (outerdest, 1));
1982 int pos = INTVAL (XEXP (outerdest, 2));
1984 /* If we have a narrower mode, we can do something. */
1985 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1987 int offset = pos / BITS_PER_UNIT;
1988 rtx old_pos = XEXP (outerdest, 2);
1989 rtx newmem;
1991 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1992 offset = (GET_MODE_SIZE (is_mode)
1993 - GET_MODE_SIZE (wanted_mode) - offset);
1995 pos %= GET_MODE_BITSIZE (wanted_mode);
1997 newmem = gen_rtx (MEM, wanted_mode,
1998 plus_constant (XEXP (tem, 0), offset));
1999 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2000 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2001 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2003 /* Make the change and see if the insn remains valid. */
2004 INSN_CODE (insn) = -1;
2005 XEXP (outerdest, 0) = newmem;
2006 XEXP (outerdest, 2) = GEN_INT (pos);
2008 if (recog_memoized (insn) >= 0)
2009 return;
2011 /* Otherwise, restore old position. XEXP (outerdest, 0) will be
2012 restored later. */
2013 XEXP (outerdest, 2) = old_pos;
2017 /* If we get here, the bit-field store doesn't allow memory
2018 or isn't located at a constant position. Load the value into
2019 a register, do the store, and put it back into memory. */
2021 tem1 = gen_reg_rtx (GET_MODE (tem));
2022 emit_insn_before (gen_move_insn (tem1, tem), insn);
2023 emit_insn_after (gen_move_insn (tem, tem1), insn);
2024 XEXP (outerdest, 0) = tem1;
2025 return;
2027 #endif
2029 /* STRICT_LOW_PART is a no-op on memory references
2030 and it can cause combinations to be unrecognizable,
2031 so eliminate it. */
2033 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2034 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2036 /* A valid insn to copy VAR into or out of a register
2037 must be left alone, to avoid an infinite loop here.
2038 If the reference to VAR is by a subreg, fix that up,
2039 since SUBREG is not valid for a memref.
2040 Also fix up the address of the stack slot.
2042 Note that we must not try to recognize the insn until
2043 after we know that we have valid addresses and no
2044 (subreg (mem ...) ...) constructs, since these interfere
2045 with determining the validity of the insn. */
2047 if ((SET_SRC (x) == var
2048 || (GET_CODE (SET_SRC (x)) == SUBREG
2049 && SUBREG_REG (SET_SRC (x)) == var))
2050 && (GET_CODE (SET_DEST (x)) == REG
2051 || (GET_CODE (SET_DEST (x)) == SUBREG
2052 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2053 && GET_MODE (var) == promoted_mode
2054 && x == single_set (insn))
2056 rtx pat;
2058 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2059 if (replacement->new)
2060 SET_SRC (x) = replacement->new;
2061 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2062 SET_SRC (x) = replacement->new
2063 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2064 else
2065 SET_SRC (x) = replacement->new
2066 = fixup_stack_1 (SET_SRC (x), insn);
2068 if (recog_memoized (insn) >= 0)
2069 return;
2071 /* INSN is not valid, but we know that we want to
2072 copy SET_SRC (x) to SET_DEST (x) in some way. So
2073 we generate the move and see whether it requires more
2074 than one insn. If it does, we emit those insns and
2075 delete INSN. Otherwise, we can just replace the pattern
2076 of INSN; we have already verified above that INSN has
2077 no function other than to do X. */
2079 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2080 if (GET_CODE (pat) == SEQUENCE)
2082 emit_insn_after (pat, insn);
2083 PUT_CODE (insn, NOTE);
2084 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2085 NOTE_SOURCE_FILE (insn) = 0;
2087 else
2088 PATTERN (insn) = pat;
2090 return;
2093 if ((SET_DEST (x) == var
2094 || (GET_CODE (SET_DEST (x)) == SUBREG
2095 && SUBREG_REG (SET_DEST (x)) == var))
2096 && (GET_CODE (SET_SRC (x)) == REG
2097 || (GET_CODE (SET_SRC (x)) == SUBREG
2098 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2099 && GET_MODE (var) == promoted_mode
2100 && x == single_set (insn))
2102 rtx pat;
2104 if (GET_CODE (SET_DEST (x)) == SUBREG)
2105 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2106 else
2107 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2109 if (recog_memoized (insn) >= 0)
2110 return;
2112 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2113 if (GET_CODE (pat) == SEQUENCE)
2115 emit_insn_after (pat, insn);
2116 PUT_CODE (insn, NOTE);
2117 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2118 NOTE_SOURCE_FILE (insn) = 0;
2120 else
2121 PATTERN (insn) = pat;
2123 return;
2126 /* Otherwise, storing into VAR must be handled specially
2127 by storing into a temporary and copying that into VAR
2128 with a new insn after this one. Note that this case
2129 will be used when storing into a promoted scalar since
2130 the insn will now have different modes on the input
2131 and output and hence will be invalid (except for the case
2132 of setting it to a constant, which does not need any
2133 change if it is valid). We generate extra code in that case,
2134 but combine.c will eliminate it. */
2136 if (dest == var)
2138 rtx temp;
2139 rtx fixeddest = SET_DEST (x);
2141 /* A STRICT_LOW_PART around a MEM can be discarded. */
2142 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2143 fixeddest = XEXP (fixeddest, 0);
2144 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2145 if (GET_CODE (fixeddest) == SUBREG)
2147 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2148 promoted_mode = GET_MODE (fixeddest);
2150 else
2151 fixeddest = fixup_stack_1 (fixeddest, insn);
2153 temp = gen_reg_rtx (promoted_mode);
2155 emit_insn_after (gen_move_insn (fixeddest,
2156 gen_lowpart (GET_MODE (fixeddest),
2157 temp)),
2158 insn);
2160 SET_DEST (x) = temp;
2165 /* Nothing special about this RTX; fix its operands. */
2167 fmt = GET_RTX_FORMAT (code);
2168 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2170 if (fmt[i] == 'e')
2171 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2172 if (fmt[i] == 'E')
2174 register int j;
2175 for (j = 0; j < XVECLEN (x, i); j++)
2176 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2177 insn, replacements);
2182 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2183 return an rtx (MEM:m1 newaddr) which is equivalent.
2184 If any insns must be emitted to compute NEWADDR, put them before INSN.
2186 UNCRITICAL nonzero means accept paradoxical subregs.
2187 This is used for subregs found inside REG_NOTES. */
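/* A purely illustrative example, assuming 4-byte words:
   (subreg:HI (mem:SI (reg A)) 0) becomes (mem:HI (reg A)) on a
   little-endian target, but (mem:HI (plus (reg A) (const_int 2))) on
   a big-endian one, where the low half of a word lives at the higher
   address.  */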
2189 static rtx
2190 fixup_memory_subreg (x, insn, uncritical)
2191 rtx x;
2192 rtx insn;
2193 int uncritical;
2195 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2196 rtx addr = XEXP (SUBREG_REG (x), 0);
2197 enum machine_mode mode = GET_MODE (x);
2198 rtx saved, result;
2200 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2201 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2202 && ! uncritical)
2203 abort ();
2205 if (BYTES_BIG_ENDIAN)
2206 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2207 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2208 addr = plus_constant (addr, offset);
2209 if (!flag_force_addr && memory_address_p (mode, addr))
2210 /* Shortcut if no insns need be emitted. */
2211 return change_address (SUBREG_REG (x), mode, addr);
2212 start_sequence ();
2213 result = change_address (SUBREG_REG (x), mode, addr);
2214 emit_insn_before (gen_sequence (), insn);
2215 end_sequence ();
2216 return result;
2219 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2220 Replace subexpressions of X in place.
2221 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2222 Otherwise return X, with its contents possibly altered.
2224 If any insns must be emitted to compute NEWADDR, put them before INSN.
2226 UNCRITICAL is as in fixup_memory_subreg. */
2228 static rtx
2229 walk_fixup_memory_subreg (x, insn, uncritical)
2230 register rtx x;
2231 rtx insn;
2232 int uncritical;
2234 register enum rtx_code code;
2235 register char *fmt;
2236 register int i;
2238 if (x == 0)
2239 return 0;
2241 code = GET_CODE (x);
2243 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2244 return fixup_memory_subreg (x, insn, uncritical);
2246 /* Nothing special about this RTX; fix its operands. */
2248 fmt = GET_RTX_FORMAT (code);
2249 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2251 if (fmt[i] == 'e')
2252 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2253 if (fmt[i] == 'E')
2255 register int j;
2256 for (j = 0; j < XVECLEN (x, i); j++)
2257 XVECEXP (x, i, j)
2258 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2261 return x;
2264 /* For each memory ref within X, if it refers to a stack slot
2265 with an out of range displacement, put the address in a temp register
2266 (emitting new insns before INSN to load these registers)
2267 and alter the memory ref to use that register.
2268 Replace each such MEM rtx with a copy, to avoid clobberage. */
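/* A purely illustrative example (displacement assumed): if INSN
   contains (mem:SI (plus (reg virtual-stack-vars) (const_int 40000)))
   and the target cannot encode so large a displacement, we emit insns
   before INSN that load the sum into a pseudo T, and replace the
   reference with a fresh (mem:SI (reg T)).  */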
2270 static rtx
2271 fixup_stack_1 (x, insn)
2272 rtx x;
2273 rtx insn;
2275 register int i;
2276 register RTX_CODE code = GET_CODE (x);
2277 register char *fmt;
2279 if (code == MEM)
2281 register rtx ad = XEXP (x, 0);
2282 /* If we have address of a stack slot but it's not valid
2283 (displacement is too large), compute the sum in a register. */
2284 if (GET_CODE (ad) == PLUS
2285 && GET_CODE (XEXP (ad, 0)) == REG
2286 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2287 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2288 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2289 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2291 rtx temp, seq;
2292 if (memory_address_p (GET_MODE (x), ad))
2293 return x;
2295 start_sequence ();
2296 temp = copy_to_reg (ad);
2297 seq = gen_sequence ();
2298 end_sequence ();
2299 emit_insn_before (seq, insn);
2300 return change_address (x, VOIDmode, temp);
2302 return x;
2305 fmt = GET_RTX_FORMAT (code);
2306 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2308 if (fmt[i] == 'e')
2309 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2310 if (fmt[i] == 'E')
2312 register int j;
2313 for (j = 0; j < XVECLEN (x, i); j++)
2314 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2317 return x;
2320 /* Optimization: a bit-field instruction whose field
2321 happens to be a byte or halfword in memory
2322 can be changed to a move instruction.
2324 We call here when INSN is an insn to examine or store into a bit-field.
2325 BODY is the SET-rtx to be altered.
2327 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2328 (Currently this is called only from function.c, and EQUIV_MEM
2329 is always 0.) */
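/* A purely illustrative example, assuming little-endian bit and byte
   numbering:
   (set (reg R) (zero_extract:SI (mem:SI (reg A)) (const_int 8) (const_int 8)))
   extracts an aligned byte, so it can load
   (mem:QI (plus (reg A) (const_int 1))) instead, followed by a
   zero-extending convert_move into the mode of R if the modes
   differ.  */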
2331 static void
2332 optimize_bit_field (body, insn, equiv_mem)
2333 rtx body;
2334 rtx insn;
2335 rtx *equiv_mem;
2337 register rtx bitfield;
2338 int destflag;
2339 rtx seq = 0;
2340 enum machine_mode mode;
2342 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2343 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2344 bitfield = SET_DEST (body), destflag = 1;
2345 else
2346 bitfield = SET_SRC (body), destflag = 0;
2348 /* First check that the field being stored has constant size and position
2349 and is in fact a byte or halfword suitably aligned. */
2351 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2352 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2353 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2354 != BLKmode)
2355 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2357 register rtx memref = 0;
2359 /* Now check that the containing word is memory, not a register,
2360 and that it is safe to change the machine mode. */
2362 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2363 memref = XEXP (bitfield, 0);
2364 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2365 && equiv_mem != 0)
2366 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2367 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2368 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2369 memref = SUBREG_REG (XEXP (bitfield, 0));
2370 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2371 && equiv_mem != 0
2372 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2373 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2375 if (memref
2376 && ! mode_dependent_address_p (XEXP (memref, 0))
2377 && ! MEM_VOLATILE_P (memref))
2379 /* Now adjust the address, first for any subreg'ing
2380 that we are now getting rid of,
2381 and then for which byte of the word is wanted. */
2383 register int offset = INTVAL (XEXP (bitfield, 2));
2384 rtx insns;
2386 /* Adjust OFFSET to count bits from low-address byte. */
2387 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2388 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2389 - offset - INTVAL (XEXP (bitfield, 1)));
2391 /* Adjust OFFSET to count bytes from low-address byte. */
2392 offset /= BITS_PER_UNIT;
2393 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2395 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2396 if (BYTES_BIG_ENDIAN)
2397 offset -= (MIN (UNITS_PER_WORD,
2398 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2399 - MIN (UNITS_PER_WORD,
2400 GET_MODE_SIZE (GET_MODE (memref))));
2403 start_sequence ();
2404 memref = change_address (memref, mode,
2405 plus_constant (XEXP (memref, 0), offset));
2406 insns = get_insns ();
2407 end_sequence ();
2408 emit_insns_before (insns, insn);
2410 /* Store this memory reference where
2411 we found the bit field reference. */
2413 if (destflag)
2415 validate_change (insn, &SET_DEST (body), memref, 1);
2416 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2418 rtx src = SET_SRC (body);
2419 while (GET_CODE (src) == SUBREG
2420 && SUBREG_WORD (src) == 0)
2421 src = SUBREG_REG (src);
2422 if (GET_MODE (src) != GET_MODE (memref))
2423 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2424 validate_change (insn, &SET_SRC (body), src, 1);
2426 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2427 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2428 /* This shouldn't happen because anything that didn't have
2429 one of these modes should have got converted explicitly
2430 and then referenced through a subreg.
2431 This is so because the original bit-field was
2432 handled by agg_mode and so its tree structure had
2433 the same mode that memref now has. */
2434 abort ();
2436 else
2438 rtx dest = SET_DEST (body);
2440 while (GET_CODE (dest) == SUBREG
2441 && SUBREG_WORD (dest) == 0
2442 && (GET_MODE_CLASS (GET_MODE (dest))
2443 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2444 dest = SUBREG_REG (dest);
2446 validate_change (insn, &SET_DEST (body), dest, 1);
2448 if (GET_MODE (dest) == GET_MODE (memref))
2449 validate_change (insn, &SET_SRC (body), memref, 1);
2450 else
2452 /* Convert the mem ref to the destination mode. */
2453 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2455 start_sequence ();
2456 convert_move (newreg, memref,
2457 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2458 seq = get_insns ();
2459 end_sequence ();
2461 validate_change (insn, &SET_SRC (body), newreg, 1);
2465 /* See if we can convert this extraction or insertion into
2466 a simple move insn. We might not be able to do so if this
2467 was, for example, part of a PARALLEL.
2469 If we succeed, write out any needed conversions. If we fail,
2470 it is hard to guess why we failed, so don't do anything
2471 special; just let the optimization be suppressed. */
2473 if (apply_change_group () && seq)
2474 emit_insns_before (seq, insn);
2479 /* These routines are responsible for converting virtual register references
2480 to the actual hard register references once RTL generation is complete.
2482 The following four variables are used for communication between the
2483 routines. They contain the offsets of the virtual registers from their
2484 respective hard registers. */
2486 static int in_arg_offset;
2487 static int var_offset;
2488 static int dynamic_offset;
2489 static int out_arg_offset;
2491 /* In most machines, the stack pointer register is equivalent to the bottom
2492 of the stack. */
2494 #ifndef STACK_POINTER_OFFSET
2495 #define STACK_POINTER_OFFSET 0
2496 #endif
2498 /* If not defined, pick an appropriate default for the offset of dynamically
2499 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2500 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2502 #ifndef STACK_DYNAMIC_OFFSET
2504 #ifdef ACCUMULATE_OUTGOING_ARGS
2505 /* The bottom of the stack points to the actual arguments. If
2506 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2507 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2508 stack space for register parameters is not pushed by the caller, but
2509 is rather part of the fixed stack areas and hence not included in
2510 `current_function_outgoing_args_size'. Nevertheless, we must allow
2511 for it when allocating dynamic stack objects. */
2513 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2514 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2515 (current_function_outgoing_args_size \
2516 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2518 #else
2519 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2520 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2521 #endif
2523 #else
2524 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2525 #endif
2526 #endif
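/* A worked example with assumed values: given ACCUMULATE_OUTGOING_ARGS,
   a REG_PARM_STACK_SPACE of 16 bytes, no OUTGOING_REG_PARM_STACK_SPACE,
   48 bytes of outgoing arguments and a STACK_POINTER_OFFSET of 0, the
   macro above places dynamically allocated objects 16 + 48 = 64 bytes
   above the stack pointer.  */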
2528 /* Pass through the INSNS of function FNDECL and convert virtual register
2529 references to hard register references. */
2531 void
2532 instantiate_virtual_regs (fndecl, insns)
2533 tree fndecl;
2534 rtx insns;
2536 rtx insn;
2538 /* Compute the offsets to use for this function. */
2539 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2540 var_offset = STARTING_FRAME_OFFSET;
2541 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2542 out_arg_offset = STACK_POINTER_OFFSET;
2544 /* Scan all variables and parameters of this function. For each that is
2545 in memory, instantiate all virtual registers if the result is a valid
2546 address. If not, we do it later. That will handle most uses of virtual
2547 regs on many machines. */
2548 instantiate_decls (fndecl, 1);
2550 /* Initialize recognition, indicating that volatile is OK. */
2551 init_recog ();
2553 /* Scan through all the insns, instantiating every virtual register still
2554 present. */
2555 for (insn = insns; insn; insn = NEXT_INSN (insn))
2556 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2557 || GET_CODE (insn) == CALL_INSN)
2559 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2560 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2563 /* Now instantiate the remaining register equivalences for debugging info.
2564 These will not be valid addresses. */
2565 instantiate_decls (fndecl, 0);
2567 /* Indicate that, from now on, assign_stack_local should use
2568 frame_pointer_rtx. */
2569 virtuals_instantiated = 1;
2572 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2573 all virtual registers in their DECL_RTL's.
2575 If VALID_ONLY, do this only if the resulting address is still valid.
2576 Otherwise, always do it. */
2578 static void
2579 instantiate_decls (fndecl, valid_only)
2580 tree fndecl;
2581 int valid_only;
2583 tree decl;
2585 if (DECL_SAVED_INSNS (fndecl))
2586 /* When compiling an inline function, the obstack used for
2587 rtl allocation is the maybepermanent_obstack. Calling
2588 `resume_temporary_allocation' switches us back to that
2589 obstack while we process this function's parameters. */
2590 resume_temporary_allocation ();
2592 /* Process all parameters of the function. */
2593 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2595 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2596 valid_only);
2597 instantiate_decl (DECL_INCOMING_RTL (decl),
2598 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2601 /* Now process all variables defined in the function or its subblocks. */
2602 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2604 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2606 /* Save all rtl allocated for this function by raising the
2607 high-water mark on the maybepermanent_obstack. */
2608 preserve_data ();
2609 /* All further rtl allocation is now done in the current_obstack. */
2610 rtl_in_current_obstack ();
2614 /* Subroutine of instantiate_decls: Process all decls in the given
2615 BLOCK node and all its subblocks. */
2617 static void
2618 instantiate_decls_1 (let, valid_only)
2619 tree let;
2620 int valid_only;
2622 tree t;
2624 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2625 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2626 valid_only);
2628 /* Process all subblocks. */
2629 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2630 instantiate_decls_1 (t, valid_only);
2633 /* Subroutine of the preceding procedures: Given RTL representing a
2634 decl and the size of the object, do any instantiation required.
2636 If VALID_ONLY is non-zero, it means that the RTL should only be
2637 changed if the new address is valid. */
2639 static void
2640 instantiate_decl (x, size, valid_only)
2641 rtx x;
2642 int size;
2643 int valid_only;
2645 enum machine_mode mode;
2646 rtx addr;
2648 /* If this is not a MEM, no need to do anything. Similarly if the
2649 address is a constant or a register that is not a virtual register. */
2651 if (x == 0 || GET_CODE (x) != MEM)
2652 return;
2654 addr = XEXP (x, 0);
2655 if (CONSTANT_P (addr)
2656 || (GET_CODE (addr) == REG
2657 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2658 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2659 return;
2661 /* If we should only do this if the address is valid, copy the address.
2662 We need to do this so we can undo any changes that might make the
2663 address invalid. This copy is unfortunate, but probably can't be
2664 avoided. */
2666 if (valid_only)
2667 addr = copy_rtx (addr);
2669 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2671 if (! valid_only)
2672 return;
2674 /* Now verify that the resulting address is valid for every integer or
2675 floating-point mode up to and including SIZE bytes long. We do this
2676 since the object might be accessed in any mode and frame addresses
2677 are shared. */
2679 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2680 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2681 mode = GET_MODE_WIDER_MODE (mode))
2682 if (! memory_address_p (mode, addr))
2683 return;
2685 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2686 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2687 mode = GET_MODE_WIDER_MODE (mode))
2688 if (! memory_address_p (mode, addr))
2689 return;
2691 /* Otherwise, put back the address, now that we have updated it and we
2692 know it is valid. */
2694 XEXP (x, 0) = addr;
2697 /* Given a pointer to a piece of rtx and an optional pointer to the
2698 containing object, instantiate any virtual registers present in it.
2700 If EXTRA_INSNS, we always do the replacement and generate
2701 any extra insns before OBJECT. If it is zero, we do nothing if the replacement
2702 is not valid.
2704 Return 1 if we either had nothing to do or if we were able to do the
2705 needed replacement. Return 0 otherwise; we only return zero if
2706 EXTRA_INSNS is zero.
2708 We first try some simple transformations to avoid the creation of extra
2709 pseudos. */
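/* A purely illustrative example (offsets assumed): if var_offset is
   -16, the address (plus:SI (reg virtual-stack-vars) (const_int 4))
   is rewritten as (plus:SI (reg frame-pointer) (const_int -12)).
   validate_change checks that the containing insn still matches some
   pattern; only if it does not, and EXTRA_INSNS is nonzero, do we
   fall back to computing the offset or the whole sum into a new
   pseudo in front of OBJECT.  */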
2711 static int
2712 instantiate_virtual_regs_1 (loc, object, extra_insns)
2713 rtx *loc;
2714 rtx object;
2715 int extra_insns;
2717 rtx x;
2718 RTX_CODE code;
2719 rtx new = 0;
2720 int offset;
2721 rtx temp;
2722 rtx seq;
2723 int i, j;
2724 char *fmt;
2726 /* Re-start here to avoid recursion in common cases. */
2727 restart:
2729 x = *loc;
2730 if (x == 0)
2731 return 1;
2733 code = GET_CODE (x);
2735 /* Check for some special cases. */
2736 switch (code)
2738 case CONST_INT:
2739 case CONST_DOUBLE:
2740 case CONST:
2741 case SYMBOL_REF:
2742 case CODE_LABEL:
2743 case PC:
2744 case CC0:
2745 case ASM_INPUT:
2746 case ADDR_VEC:
2747 case ADDR_DIFF_VEC:
2748 case RETURN:
2749 return 1;
2751 case SET:
2752 /* We are allowed to set the virtual registers. This means
2753 that the actual register should receive the source minus the
2754 appropriate offset. This is used, for example, in the handling
2755 of non-local gotos. */
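/* A purely illustrative case: a nonlocal goto restores the stack
   level with something like (set (reg virtual-stack-dynamic) (reg R)).
   Since the virtual register stands for the stack pointer plus
   dynamic_offset, the real insn must set the stack pointer to R minus
   dynamic_offset; that sum is computed by insns emitted in front of
   the original one.  */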
2756 if (SET_DEST (x) == virtual_incoming_args_rtx)
2757 new = arg_pointer_rtx, offset = - in_arg_offset;
2758 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2759 new = frame_pointer_rtx, offset = - var_offset;
2760 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2761 new = stack_pointer_rtx, offset = - dynamic_offset;
2762 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2763 new = stack_pointer_rtx, offset = - out_arg_offset;
2765 if (new)
2767 /* The only valid sources here are PLUS or REG. Just do
2768 the simplest possible thing to handle them. */
2769 if (GET_CODE (SET_SRC (x)) != REG
2770 && GET_CODE (SET_SRC (x)) != PLUS)
2771 abort ();
2773 start_sequence ();
2774 if (GET_CODE (SET_SRC (x)) != REG)
2775 temp = force_operand (SET_SRC (x), NULL_RTX);
2776 else
2777 temp = SET_SRC (x);
2778 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2779 seq = get_insns ();
2780 end_sequence ();
2782 emit_insns_before (seq, object);
2783 SET_DEST (x) = new;
2785 if (!validate_change (object, &SET_SRC (x), temp, 0)
2786 || ! extra_insns)
2787 abort ();
2789 return 1;
2792 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2793 loc = &SET_SRC (x);
2794 goto restart;
2796 case PLUS:
2797 /* Handle special case of virtual register plus constant. */
2798 if (CONSTANT_P (XEXP (x, 1)))
2800 rtx old, new_offset;
2802 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2803 if (GET_CODE (XEXP (x, 0)) == PLUS)
2805 rtx inner = XEXP (XEXP (x, 0), 0);
2807 if (inner == virtual_incoming_args_rtx)
2808 new = arg_pointer_rtx, offset = in_arg_offset;
2809 else if (inner == virtual_stack_vars_rtx)
2810 new = frame_pointer_rtx, offset = var_offset;
2811 else if (inner == virtual_stack_dynamic_rtx)
2812 new = stack_pointer_rtx, offset = dynamic_offset;
2813 else if (inner == virtual_outgoing_args_rtx)
2814 new = stack_pointer_rtx, offset = out_arg_offset;
2815 else
2817 loc = &XEXP (x, 0);
2818 goto restart;
2821 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2822 extra_insns);
2823 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2826 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2827 new = arg_pointer_rtx, offset = in_arg_offset;
2828 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2829 new = frame_pointer_rtx, offset = var_offset;
2830 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2831 new = stack_pointer_rtx, offset = dynamic_offset;
2832 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2833 new = stack_pointer_rtx, offset = out_arg_offset;
2834 else
2836 /* We know the second operand is a constant. Unless the
2837 first operand is a REG (which has already been checked),
2838 it needs to be checked. */
2839 if (GET_CODE (XEXP (x, 0)) != REG)
2841 loc = &XEXP (x, 0);
2842 goto restart;
2844 return 1;
2847 new_offset = plus_constant (XEXP (x, 1), offset);
2849 /* If the new constant is zero, try to replace the sum with just
2850 the register. */
2851 if (new_offset == const0_rtx
2852 && validate_change (object, loc, new, 0))
2853 return 1;
2855 /* Next try to replace the register and new offset.
2856 There are two changes to validate here and we can't assume that,
2857 when the old offset equals the new one, just changing the register
2858 will yield a valid insn. In the interests of a little efficiency,
2859 however, we only call validate_change once (we don't queue up the
2860 changes and then call apply_change_group). */
2862 old = XEXP (x, 0);
2863 if (offset == 0
2864 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2865 : (XEXP (x, 0) = new,
2866 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2868 if (! extra_insns)
2870 XEXP (x, 0) = old;
2871 return 0;
2874 /* Otherwise copy the new constant into a register and replace
2875 the constant with that register. */
2876 temp = gen_reg_rtx (Pmode);
2877 XEXP (x, 0) = new;
2878 if (validate_change (object, &XEXP (x, 1), temp, 0))
2879 emit_insn_before (gen_move_insn (temp, new_offset), object);
2880 else
2882 /* If that didn't work, replace this expression with a
2883 register containing the sum. */
2885 XEXP (x, 0) = old;
2886 new = gen_rtx (PLUS, Pmode, new, new_offset);
2888 start_sequence ();
2889 temp = force_operand (new, NULL_RTX);
2890 seq = get_insns ();
2891 end_sequence ();
2893 emit_insns_before (seq, object);
2894 if (! validate_change (object, loc, temp, 0)
2895 && ! validate_replace_rtx (x, temp, object))
2896 abort ();
2900 return 1;
2903 /* Fall through to generic two-operand expression case. */
2904 case EXPR_LIST:
2905 case CALL:
2906 case COMPARE:
2907 case MINUS:
2908 case MULT:
2909 case DIV: case UDIV:
2910 case MOD: case UMOD:
2911 case AND: case IOR: case XOR:
2912 case ROTATERT: case ROTATE:
2913 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2914 case NE: case EQ:
2915 case GE: case GT: case GEU: case GTU:
2916 case LE: case LT: case LEU: case LTU:
2917 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2918 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2919 loc = &XEXP (x, 0);
2920 goto restart;
2922 case MEM:
2923 /* Most cases of MEM that convert to valid addresses have already been
2924 handled by our scan of regno_reg_rtx. The only special handling we
2925 need here is to make a copy of the rtx to ensure it isn't being
2926 shared if we have to change it to a pseudo.
2928 If the rtx is a simple reference to an address via a virtual register,
2929 it can potentially be shared. In such cases, first try to make it
2930 a valid address, which can also be shared. Otherwise, copy it and
2931 proceed normally.
2933 First check for common cases that need no processing. These are
2934 usually due to instantiation already being done on a previous instance
2935 of a shared rtx. */
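/* A purely illustrative case (a var_offset of 0 is assumed): several
   insns may share one (mem:SI (reg virtual-stack-vars)) rtx.
   Substituting in place turns it into (mem:SI (reg frame-pointer))
   once, and every insn sharing the rtx benefits; only when the
   substituted address would not be valid do we fall through and copy
   X below.  */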
2937 temp = XEXP (x, 0);
2938 if (CONSTANT_ADDRESS_P (temp)
2939 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2940 || temp == arg_pointer_rtx
2941 #endif
2942 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2943 || temp == hard_frame_pointer_rtx
2944 #endif
2945 || temp == frame_pointer_rtx)
2946 return 1;
2948 if (GET_CODE (temp) == PLUS
2949 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2950 && (XEXP (temp, 0) == frame_pointer_rtx
2951 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2952 || XEXP (temp, 0) == hard_frame_pointer_rtx
2953 #endif
2954 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2955 || XEXP (temp, 0) == arg_pointer_rtx
2956 #endif
2958 return 1;
2960 if (temp == virtual_stack_vars_rtx
2961 || temp == virtual_incoming_args_rtx
2962 || (GET_CODE (temp) == PLUS
2963 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2964 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2965 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2967 /* This MEM may be shared. If the substitution can be done without
2968 the need to generate new pseudos, we want to do it in place
2969 so all copies of the shared rtx benefit. The call below will
2970 only make substitutions if the resulting address is still
2971 valid.
2973 Note that we cannot pass X as the object in the recursive call
2974 since the insn being processed may not allow all valid
2975 addresses. However, if we were not passed an object, we can
2976 only modify X without copying it if X will have a valid
2977 address.
2979 ??? Also note that this can still lose if OBJECT is an insn that
2980 has fewer restrictions on an address than some other insn.
2981 In that case, we will modify the shared address. This case
2982 doesn't seem very likely, though. */
2984 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2985 object ? object : x, 0))
2986 return 1;
2988 /* Otherwise make a copy and process that copy. We copy the entire
2989 RTL expression since it might be a PLUS which could also be
2990 shared. */
2991 *loc = x = copy_rtx (x);
2994 /* Fall through to generic unary operation case. */
2995 case USE:
2996 case CLOBBER:
2997 case SUBREG:
2998 case STRICT_LOW_PART:
2999 case NEG: case NOT:
3000 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3001 case SIGN_EXTEND: case ZERO_EXTEND:
3002 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3003 case FLOAT: case FIX:
3004 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3005 case ABS:
3006 case SQRT:
3007 case FFS:
3008 /* These cases either have just one operand or we know that we need not
3009 check the rest of the operands. */
3010 loc = &XEXP (x, 0);
3011 goto restart;
3013 case REG:
3014 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3015 in front of this insn and substitute the temporary. */
3016 if (x == virtual_incoming_args_rtx)
3017 new = arg_pointer_rtx, offset = in_arg_offset;
3018 else if (x == virtual_stack_vars_rtx)
3019 new = frame_pointer_rtx, offset = var_offset;
3020 else if (x == virtual_stack_dynamic_rtx)
3021 new = stack_pointer_rtx, offset = dynamic_offset;
3022 else if (x == virtual_outgoing_args_rtx)
3023 new = stack_pointer_rtx, offset = out_arg_offset;
3025 if (new)
3027 temp = plus_constant (new, offset);
3028 if (!validate_change (object, loc, temp, 0))
3030 if (! extra_insns)
3031 return 0;
3033 start_sequence ();
3034 temp = force_operand (temp, NULL_RTX);
3035 seq = get_insns ();
3036 end_sequence ();
3038 emit_insns_before (seq, object);
3039 if (! validate_change (object, loc, temp, 0)
3040 && ! validate_replace_rtx (x, temp, object))
3041 abort ();
3045 return 1;
3048 /* Scan all subexpressions. */
3049 fmt = GET_RTX_FORMAT (code);
3050 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3051 if (*fmt == 'e')
3053 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3054 return 0;
3056 else if (*fmt == 'E')
3057 for (j = 0; j < XVECLEN (x, i); j++)
3058 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3059 extra_insns))
3060 return 0;
3062 return 1;
3065 /* Optimization: assuming this function does not receive nonlocal gotos,
3066 delete the handlers for such, as well as the insns to establish
3067 and disestablish them. */
3069 static void
3070 delete_handlers ()
3072 rtx insn;
3073 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3075 /* Delete the handler by turning off the flag that would
3076 prevent jump_optimize from deleting it.
3077 Also permit deletion of the nonlocal labels themselves
3078 if nothing local refers to them. */
3079 if (GET_CODE (insn) == CODE_LABEL)
3081 tree t, last_t;
3083 LABEL_PRESERVE_P (insn) = 0;
3085 /* Remove it from the nonlocal_label list, to avoid confusing
3086 flow. */
3087 for (t = nonlocal_labels, last_t = 0; t;
3088 last_t = t, t = TREE_CHAIN (t))
3089 if (DECL_RTL (TREE_VALUE (t)) == insn)
3090 break;
3091 if (t)
3093 if (! last_t)
3094 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3095 else
3096 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3099 if (GET_CODE (insn) == INSN
3100 && ((nonlocal_goto_handler_slot != 0
3101 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3102 || (nonlocal_goto_stack_level != 0
3103 && reg_mentioned_p (nonlocal_goto_stack_level,
3104 PATTERN (insn)))))
3105 delete_insn (insn);
3109 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3110 of the current function. */
3112 rtx
3113 nonlocal_label_rtx_list ()
3115 tree t;
3116 rtx x = 0;
3118 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3119 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3121 return x;
3124 /* Output a USE for any register use in RTL.
3125 This is used with -noreg to mark the extent of the lifespan
3126 of any registers used in a user-visible variable's DECL_RTL. */
3128 void
3129 use_variable (rtl)
3130 rtx rtl;
3132 if (GET_CODE (rtl) == REG)
3133 /* This is a register variable. */
3134 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3135 else if (GET_CODE (rtl) == MEM
3136 && GET_CODE (XEXP (rtl, 0)) == REG
3137 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3138 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3139 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3140 /* This is a variable-sized structure. */
3141 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3144 /* Like use_variable except that it outputs the USEs after INSN
3145 instead of at the end of the insn-chain. */
3147 void
3148 use_variable_after (rtl, insn)
3149 rtx rtl, insn;
3151 if (GET_CODE (rtl) == REG)
3152 /* This is a register variable. */
3153 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3154 else if (GET_CODE (rtl) == MEM
3155 && GET_CODE (XEXP (rtl, 0)) == REG
3156 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3157 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3158 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3159 /* This is a variable-sized structure. */
3160 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3163 int
3164 max_parm_reg_num ()
3166 return max_parm_reg;
3169 /* Return the first insn following those generated by `assign_parms'. */
3171 rtx
3172 get_first_nonparm_insn ()
3174 if (last_parm_insn)
3175 return NEXT_INSN (last_parm_insn);
3176 return get_insns ();
3179 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3180 Crash if there is none. */
3182 rtx
3183 get_first_block_beg ()
3185 register rtx searcher;
3186 register rtx insn = get_first_nonparm_insn ();
3188 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3189 if (GET_CODE (searcher) == NOTE
3190 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3191 return searcher;
3193 abort (); /* Invalid call to this function. (See comments above.) */
3194 return NULL_RTX;
3197 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3198 This means a type for which function calls must pass an address to the
3199 function or get an address back from the function.
3200 EXP may be a type node or an expression (whose type is tested). */
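/* A hypothetical example of a type for which this returns 1 on most
   targets: the result below cannot live in the call-clobbered return
   registers, so callers of f pass the address of a temporary in which
   f stores its result.  */
#if 0
struct big { int a[8]; };
struct big f ();
#endif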
3202 int
3203 aggregate_value_p (exp)
3204 tree exp;
3206 int i, regno, nregs;
3207 rtx reg;
3208 tree type;
3209 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3210 type = exp;
3211 else
3212 type = TREE_TYPE (exp);
3214 if (RETURN_IN_MEMORY (type))
3215 return 1;
3216 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3217 and thus can't be returned in registers. */
3218 if (TREE_ADDRESSABLE (type))
3219 return 1;
3220 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3221 return 1;
3222 /* Make sure we have suitable call-clobbered regs to return
3223 the value in; if not, we must return it in memory. */
3224 reg = hard_function_value (type, 0);
3226 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3227 it is OK. */
3228 if (GET_CODE (reg) != REG)
3229 return 0;
3231 regno = REGNO (reg);
3232 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3233 for (i = 0; i < nregs; i++)
3234 if (! call_used_regs[regno + i])
3235 return 1;
3236 return 0;
3239 /* Assign RTL expressions to the function's parameters.
3240 This may involve copying them into registers and using
3241 those registers as the RTL for them.
3243 If SECOND_TIME is non-zero it means that this function is being
3244 called a second time. This is done by integrate.c when a function's
3245 compilation is deferred. We need to come back here in case the
3246 FUNCTION_ARG macro computes items needed for the rest of the compilation
3247 (such as changing which registers are fixed or caller-saved). But suppress
3248 writing any insns or setting DECL_RTL of anything in this case. */
3250 void
3251 assign_parms (fndecl, second_time)
3252 tree fndecl;
3253 int second_time;
3255 register tree parm;
3256 register rtx entry_parm = 0;
3257 register rtx stack_parm = 0;
3258 CUMULATIVE_ARGS args_so_far;
3259 enum machine_mode promoted_mode, passed_mode;
3260 enum machine_mode nominal_mode, promoted_nominal_mode;
3261 int unsignedp;
3262 /* Total space needed so far for args on the stack,
3263 given as a constant and a tree-expression. */
3264 struct args_size stack_args_size;
3265 tree fntype = TREE_TYPE (fndecl);
3266 tree fnargs = DECL_ARGUMENTS (fndecl);
3267 /* This is used for the arg pointer when referring to stack args. */
3268 rtx internal_arg_pointer;
3269 /* This is a dummy PARM_DECL that we used for the function result if
3270 the function returns a structure. */
3271 tree function_result_decl = 0;
3272 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3273 int varargs_setup = 0;
3274 rtx conversion_insns = 0;
3276 /* Nonzero if the last arg is named `__builtin_va_alist',
3277 which is used on some machines for old-fashioned non-ANSI varargs.h;
3278 this should be stuck onto the stack as if it had arrived there. */
3279 int hide_last_arg
3280 = (current_function_varargs
3281 && fnargs
3282 && (parm = tree_last (fnargs)) != 0
3283 && DECL_NAME (parm)
3284 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3285 "__builtin_va_alist")));
3287 /* Nonzero if function takes extra anonymous args.
3288 This means the last named arg must be on the stack
3289 right before the anonymous ones. */
3290 int stdarg
3291 = (TYPE_ARG_TYPES (fntype) != 0
3292 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3293 != void_type_node));
3295 current_function_stdarg = stdarg;
3297 /* If the reg that the virtual arg pointer will be translated into is
3298 not a fixed reg or is the stack pointer, make a copy of the virtual
3299 arg pointer, and address parms via the copy. The frame pointer is
3300 considered fixed even though it is not marked as such.
3302 The second time through, simply use ap to avoid generating rtx. */
3304 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3305 || ! (fixed_regs[ARG_POINTER_REGNUM]
3306 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3307 && ! second_time)
3308 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3309 else
3310 internal_arg_pointer = virtual_incoming_args_rtx;
3311 current_function_internal_arg_pointer = internal_arg_pointer;
3313 stack_args_size.constant = 0;
3314 stack_args_size.var = 0;
3316 /* If the struct value address is treated as the first argument, make it so. */
3317 if (aggregate_value_p (DECL_RESULT (fndecl))
3318 && ! current_function_returns_pcc_struct
3319 && struct_value_incoming_rtx == 0)
3321 tree type = build_pointer_type (TREE_TYPE (fntype));
3323 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3325 DECL_ARG_TYPE (function_result_decl) = type;
3326 TREE_CHAIN (function_result_decl) = fnargs;
3327 fnargs = function_result_decl;
3330 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3331 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3333 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3334 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3335 #else
3336 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3337 #endif
3339 /* We haven't yet found an argument that we must push and pretend the
3340 caller did. */
3341 current_function_pretend_args_size = 0;
3343 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3345 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3346 struct args_size stack_offset;
3347 struct args_size arg_size;
3348 int passed_pointer = 0;
3349 int did_conversion = 0;
3350 tree passed_type = DECL_ARG_TYPE (parm);
3351 tree nominal_type = TREE_TYPE (parm);
3353 /* Set LAST_NAMED if this is the last named arg before some
3354 anonymous args. We treat it as if it were anonymous too. */
3355 int last_named = ((TREE_CHAIN (parm) == 0
3356 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3357 && (stdarg || current_function_varargs));
3359 if (TREE_TYPE (parm) == error_mark_node
3360 /* This can happen after weird syntax errors
3361 or if an enum type is defined among the parms. */
3362 || TREE_CODE (parm) != PARM_DECL
3363 || passed_type == NULL)
3365 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3366 const0_rtx);
3367 TREE_USED (parm) = 1;
3368 continue;
3371 /* For a varargs.h function, save info about regs and stack space
3372 used by the individual args, not including the va_alist arg. */
3373 if (hide_last_arg && last_named)
3374 current_function_args_info = args_so_far;
3376 /* Find mode of arg as it is passed, and mode of arg
3377 as it should be during execution of this function. */
3378 passed_mode = TYPE_MODE (passed_type);
3379 nominal_mode = TYPE_MODE (nominal_type);
3381 /* If the parm's mode is VOID, its value doesn't matter,
3382 so avoid the usual things like emit_move_insn that could crash. */
3383 if (nominal_mode == VOIDmode)
3385 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3386 continue;
3389 /* If the parm is to be passed as a transparent union, use the
3390 type of the first field for the tests below. We have already
3391 verified that the modes are the same. */
3392 if (DECL_TRANSPARENT_UNION (parm)
3393 || TYPE_TRANSPARENT_UNION (passed_type))
3394 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3396 /* See if this arg was passed by invisible reference. It is if
3397 it is an object whose size depends on the contents of the
3398 object itself or if the machine requires these objects be passed
3399 that way. */
3401 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3402 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3403 || TREE_ADDRESSABLE (passed_type)
3404 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3405 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3406 passed_type, ! last_named)
3407 #endif
3410 passed_type = nominal_type = build_pointer_type (passed_type);
3411 passed_pointer = 1;
3412 passed_mode = nominal_mode = Pmode;
3415 promoted_mode = passed_mode;
3417 #ifdef PROMOTE_FUNCTION_ARGS
3418 /* Compute the mode to which the arg is actually extended. */
3419 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3420 #endif
3422 /* Let machine desc say which reg (if any) the parm arrives in.
3423 0 means it arrives on the stack. */
3424 #ifdef FUNCTION_INCOMING_ARG
3425 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3426 passed_type, ! last_named);
3427 #else
3428 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3429 passed_type, ! last_named);
3430 #endif
3432 if (entry_parm == 0)
3433 promoted_mode = passed_mode;
3435 #ifdef SETUP_INCOMING_VARARGS
3436 /* If this is the last named parameter, do any required setup for
3437 varargs or stdargs. We need to know about the case of this being an
3438 addressable type, in which case we skip the registers it
3439 would have arrived in.
3441 For stdargs, LAST_NAMED will be set for two parameters, the one that
3442 is actually the last named, and the dummy parameter. We only
3443 want to do this action once.
3445 Also, indicate when RTL generation is to be suppressed. */
3446 if (last_named && !varargs_setup)
3448 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3449 current_function_pretend_args_size,
3450 second_time);
3451 varargs_setup = 1;
3453 #endif
3455 /* Determine parm's home in the stack,
3456 in case it arrives in the stack or we should pretend it did.
3458 Compute the stack position and rtx where the argument arrives
3459 and its size.
3461 There is one complexity here: If this was a parameter that would
3462 have been passed in registers, but wasn't, only because it is
3463 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3464 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3465 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3466 0 as it was the previous time. */
3468 locate_and_pad_parm (promoted_mode, passed_type,
3469 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3471 #else
3472 #ifdef FUNCTION_INCOMING_ARG
3473 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3474 passed_type,
3475 (! last_named
3476 || varargs_setup)) != 0,
3477 #else
3478 FUNCTION_ARG (args_so_far, promoted_mode,
3479 passed_type,
3480 ! last_named || varargs_setup) != 0,
3481 #endif
3482 #endif
3483 fndecl, &stack_args_size, &stack_offset, &arg_size);
3485 if (! second_time)
3487 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3489 if (offset_rtx == const0_rtx)
3490 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3491 else
3492 stack_parm = gen_rtx (MEM, promoted_mode,
3493 gen_rtx (PLUS, Pmode,
3494 internal_arg_pointer, offset_rtx));
3496 /* If this is a memory ref that contains aggregate components,
3497 mark it as such for cse and loop optimize. Likewise if it
3498 is readonly. */
3499 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3500 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3503 /* If this parameter was passed both in registers and in the stack,
3504 use the copy on the stack. */
3505 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3506 entry_parm = 0;
3508 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3509 /* If this parm was passed part in regs and part in memory,
3510 pretend it arrived entirely in memory
3511 by pushing the register-part onto the stack.
3513 In the special case of a DImode or DFmode that is split,
3514 we could put it together in a pseudoreg directly,
3515 but for now that's not worth bothering with. */
3517 if (entry_parm)
3519 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3520 passed_type, ! last_named);
3522 if (nregs > 0)
3524 current_function_pretend_args_size
3525 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3526 / (PARM_BOUNDARY / BITS_PER_UNIT)
3527 * (PARM_BOUNDARY / BITS_PER_UNIT));
3529 if (! second_time)
3531 /* Handle calls that pass values in multiple non-contiguous
3532 locations. The Irix 6 ABI has examples of this. */
3533 if (GET_CODE (entry_parm) == PARALLEL)
3534 emit_group_store (validize_mem (stack_parm),
3535 entry_parm);
3536 else
3537 move_block_from_reg (REGNO (entry_parm),
3538 validize_mem (stack_parm), nregs,
3539 int_size_in_bytes (TREE_TYPE (parm)));
3541 entry_parm = stack_parm;
3544 #endif
3546 /* If we didn't decide this parm came in a register,
3547 by default it came on the stack. */
3548 if (entry_parm == 0)
3549 entry_parm = stack_parm;
3551 /* Record permanently how this parm was passed. */
3552 if (! second_time)
3553 DECL_INCOMING_RTL (parm) = entry_parm;
3555 /* If there is actually space on the stack for this parm,
3556 count it in stack_args_size; otherwise set stack_parm to 0
3557 to indicate there is no preallocated stack slot for the parm. */
3559 if (entry_parm == stack_parm
3560 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3561 /* On some machines, even if a parm value arrives in a register
3562 there is still an (uninitialized) stack slot allocated for it.
3564 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3565 whether this parameter already has a stack slot allocated,
3566 because an arg block exists only if current_function_args_size
3567 is larger than some threshold, and we haven't calculated that
3568 yet. So, for now, we just assume that stack slots never exist
3569 in this case. */
3570 || REG_PARM_STACK_SPACE (fndecl) > 0
3571 #endif
3574 stack_args_size.constant += arg_size.constant;
3575 if (arg_size.var)
3576 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3578 else
3579 /* No stack slot was pushed for this parm. */
3580 stack_parm = 0;
3582 /* Update info on where next arg arrives in registers. */
3584 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3585 passed_type, ! last_named);
3587 /* If this is our second time through, we are done with this parm. */
3588 if (second_time)
3589 continue;
3591 /* If we can't trust the parm stack slot to be aligned enough
3592 for its ultimate type, don't use that slot after entry.
3593 We'll make another stack slot, if we need one. */
3595 int thisparm_boundary
3596 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3598 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3599 stack_parm = 0;
3602 /* If parm was passed in memory, and we need to convert it on entry,
3603 don't store it back in that same slot. */
3604 if (entry_parm != 0
3605 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3606 stack_parm = 0;
3608 #if 0
3609 /* Now adjust STACK_PARM to the mode and precise location
3610 where this parameter should live during execution,
3611 if we discover that it must live in the stack during execution.
3612 To make debuggers happier on big-endian machines, we store
3613 the value in the last bytes of the space available. */
3615 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3616 && stack_parm != 0)
3618 rtx offset_rtx;
3620 if (BYTES_BIG_ENDIAN
3621 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3622 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3623 - GET_MODE_SIZE (nominal_mode));
3625 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3626 if (offset_rtx == const0_rtx)
3627 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3628 else
3629 stack_parm = gen_rtx (MEM, nominal_mode,
3630 gen_rtx (PLUS, Pmode,
3631 internal_arg_pointer, offset_rtx));
3633 /* If this is a memory ref that contains aggregate components,
3634 mark it as such for cse and loop optimize. */
3635 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3637 #endif /* 0 */
3639 #ifdef STACK_REGS
3640 /* We need this "use" info, because the gcc-register->stack-register
3641 converter in reg-stack.c needs to know which registers are active
3642 at the start of the function call. The actual parameter loading
3643 instructions are not always available by then, since they might
3644 have been optimized away. */
3646 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3647 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3648 #endif
3650 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3651 in the mode in which it arrives.
3652 STACK_PARM is an RTX for a stack slot where the parameter can live
3653 during the function (in case we want to put it there).
3654 STACK_PARM is 0 if no stack slot was pushed for it.
3656 Now output code if necessary to convert ENTRY_PARM to
3657 the type in which this function declares it,
3658 and store that result in an appropriate place,
3659 which may be a pseudo reg, may be STACK_PARM,
3660 or may be a local stack slot if STACK_PARM is 0.
3662 Set DECL_RTL to that place. */
3664 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3666 /* If a BLKmode arrives in registers, copy it to a stack slot.
3667 Handle calls that pass values in multiple non-contiguous
3668 locations. The Irix 6 ABI has examples of this. */
3669 if (GET_CODE (entry_parm) == REG
3670 || GET_CODE (entry_parm) == PARALLEL)
3672 int size_stored
3673 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3674 UNITS_PER_WORD);
3676 /* Note that we will be storing an integral number of words.
3677 So we have to be careful to ensure that we allocate an
3678 integral number of words. We do this below in the
3679 assign_stack_local if space was not allocated in the argument
3680 list. If it was, this will not work if PARM_BOUNDARY is not
3681 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3682 if it becomes a problem. */
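/* Illustrative only, with assumed values: if UNITS_PER_WORD == 4, a
   10-byte BLKmode parm gives SIZE_STORED == CEIL_ROUND (10, 4) == 12,
   so the move_block_from_reg call below copies 12 / 4 == 3 whole
   words. */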
3684 if (stack_parm == 0)
3686 stack_parm
3687 = assign_stack_local (GET_MODE (entry_parm),
3688 size_stored, 0);
3690 /* If this is a memory ref that contains aggregate
3691 components, mark it as such for cse and loop optimize. */
3692 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3695 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3696 abort ();
3698 if (TREE_READONLY (parm))
3699 RTX_UNCHANGING_P (stack_parm) = 1;
3701 /* Handle calls that pass values in multiple non-contiguous
3702 locations. The Irix 6 ABI has examples of this. */
3703 if (GET_CODE (entry_parm) == PARALLEL)
3704 emit_group_store (validize_mem (stack_parm), entry_parm);
3705 else
3706 move_block_from_reg (REGNO (entry_parm),
3707 validize_mem (stack_parm),
3708 size_stored / UNITS_PER_WORD,
3709 int_size_in_bytes (TREE_TYPE (parm)));
3711 DECL_RTL (parm) = stack_parm;
3713 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3714 && ! DECL_INLINE (fndecl))
3715 /* layout_decl may set this. */
3716 || TREE_ADDRESSABLE (parm)
3717 || TREE_SIDE_EFFECTS (parm)
3718 /* If -ffloat-store specified, don't put explicit
3719 float variables into registers. */
3720 || (flag_float_store
3721 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3722 /* Always assign pseudo to structure return or item passed
3723 by invisible reference. */
3724 || passed_pointer || parm == function_result_decl)
3726 /* Store the parm in a pseudoregister during the function, but we
3727 may need to do it in a wider mode. */
3729 register rtx parmreg;
3730 int regno, regnoi, regnor;
3732 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3734 promoted_nominal_mode
3735 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3737 parmreg = gen_reg_rtx (promoted_nominal_mode);
3738 REG_USERVAR_P (parmreg) = 1;
3740 /* If this was an item that we received a pointer to, set DECL_RTL
3741 appropriately. */
3742 if (passed_pointer)
3744 DECL_RTL (parm)
3745 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3746 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3748 else
3749 DECL_RTL (parm) = parmreg;
3751 /* Copy the value into the register. */
3752 if (nominal_mode != passed_mode
3753 || promoted_nominal_mode != promoted_mode)
3755 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3756 mode, by the caller. We now have to convert it to
3757 NOMINAL_MODE, if different. However, PARMREG may be in
3758 a different mode from NOMINAL_MODE if it is being stored
3759 promoted.
3761 If ENTRY_PARM is a hard register, it might be in a register
3762 not valid for operating in its mode (e.g., an odd-numbered
3763 register for a DFmode). In that case, moves are the only
3764 thing valid, so we can't do a convert from there. This
3765 occurs when the calling sequence allows such misaligned
3766 usages.
3768 In addition, the conversion may involve a call, which could
3769 clobber parameters which haven't been copied to pseudo
3770 registers yet. Therefore, we must first copy the parm to
3771 a pseudo reg here, and save the conversion until after all
3772 parameters have been moved. */
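/* Illustrative scenario (assumed, not target-specific): a parm declared
   float but passed as a double arrives in ENTRY_PARM in DFmode. The
   DFmode value is saved into a fresh pseudo right away, while the
   DFmode-to-SFmode truncation, which may be a libcall, is queued on
   CONVERSION_INSNS and only emitted once every parm is safe. */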
3774 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3776 emit_move_insn (tempreg, validize_mem (entry_parm));
3778 push_to_sequence (conversion_insns);
3779 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3781 expand_assignment (parm,
3782 make_tree (nominal_type, tempreg), 0, 0);
3783 conversion_insns = get_insns ();
3784 did_conversion = 1;
3785 end_sequence ();
3787 else
3788 emit_move_insn (parmreg, validize_mem (entry_parm));
3790 /* If we were passed a pointer but the actual value
3791 can safely live in a register, put it in one. */
3792 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3793 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3794 && ! DECL_INLINE (fndecl))
3795 /* layout_decl may set this. */
3796 || TREE_ADDRESSABLE (parm)
3797 || TREE_SIDE_EFFECTS (parm)
3798 /* If -ffloat-store specified, don't put explicit
3799 float variables into registers. */
3800 || (flag_float_store
3801 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3803 /* We can't use nominal_mode, because it will have been set to
3804 Pmode above. We must use the actual mode of the parm. */
3805 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3806 REG_USERVAR_P (parmreg) = 1;
3807 emit_move_insn (parmreg, DECL_RTL (parm));
3808 DECL_RTL (parm) = parmreg;
3809 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3810 now the parm. */
3811 stack_parm = 0;
3813 #ifdef FUNCTION_ARG_CALLEE_COPIES
3814 /* If we are passed an arg by reference and it is our responsibility
3815 to make a copy, do it now.
3816 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3817 original argument, so we must recreate them in the call to
3818 FUNCTION_ARG_CALLEE_COPIES. */
3819 /* ??? Later, add code to skip the copy when the argument isn't
3820 modified. */
3822 else if (passed_pointer
3823 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3824 TYPE_MODE (DECL_ARG_TYPE (parm)),
3825 DECL_ARG_TYPE (parm),
3826 ! last_named)
3827 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3829 rtx copy;
3830 tree type = DECL_ARG_TYPE (parm);
3832 /* This sequence may involve a library call perhaps clobbering
3833 registers that haven't been copied to pseudos yet. */
3835 push_to_sequence (conversion_insns);
3837 if (TYPE_SIZE (type) == 0
3838 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3839 /* This is a variable sized object. */
3840 copy = gen_rtx (MEM, BLKmode,
3841 allocate_dynamic_stack_space
3842 (expr_size (parm), NULL_RTX,
3843 TYPE_ALIGN (type)));
3844 else
3845 copy = assign_stack_temp (TYPE_MODE (type),
3846 int_size_in_bytes (type), 1);
3847 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3849 store_expr (parm, copy, 0);
3850 emit_move_insn (parmreg, XEXP (copy, 0));
3851 conversion_insns = get_insns ();
3852 did_conversion = 1;
3853 end_sequence ();
3855 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3857 /* In any case, record the parm's desired stack location
3858 in case we later discover it must live in the stack.
3860 If it is a COMPLEX value, store the stack location for both
3861 halves. */
3863 if (GET_CODE (parmreg) == CONCAT)
3864 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3865 else
3866 regno = REGNO (parmreg);
3868 if (regno >= nparmregs)
3870 rtx *new;
3871 int old_nparmregs = nparmregs;
3873 nparmregs = regno + 5;
3874 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3875 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3876 old_nparmregs * sizeof (rtx));
3877 bzero ((char *) (new + old_nparmregs),
3878 (nparmregs - old_nparmregs) * sizeof (rtx));
3879 parm_reg_stack_loc = new;
3882 if (GET_CODE (parmreg) == CONCAT)
3884 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3886 regnor = REGNO (gen_realpart (submode, parmreg));
3887 regnoi = REGNO (gen_imagpart (submode, parmreg));
3889 if (stack_parm != 0)
3891 parm_reg_stack_loc[regnor]
3892 = gen_realpart (submode, stack_parm);
3893 parm_reg_stack_loc[regnoi]
3894 = gen_imagpart (submode, stack_parm);
3896 else
3898 parm_reg_stack_loc[regnor] = 0;
3899 parm_reg_stack_loc[regnoi] = 0;
3902 else
3903 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3905 /* Mark the register as eliminable if we did no conversion
3906 and it was copied from memory at a fixed offset,
3907 and the arg pointer was not copied to a pseudo-reg.
3908 If the arg pointer is a pseudo reg or the offset formed
3909 an invalid address, such memory-equivalences
3910 as we make here would screw up life analysis for it. */
3911 if (nominal_mode == passed_mode
3912 && ! did_conversion
3913 && GET_CODE (entry_parm) == MEM
3914 && entry_parm == stack_parm
3915 && stack_offset.var == 0
3916 && reg_mentioned_p (virtual_incoming_args_rtx,
3917 XEXP (entry_parm, 0)))
3919 rtx linsn = get_last_insn ();
3920 rtx sinsn, set;
3922 /* Mark complex types separately. */
3923 if (GET_CODE (parmreg) == CONCAT)
3924 /* Scan backwards for the set of the real and
3925 imaginary parts. */
3926 for (sinsn = linsn; sinsn != 0;
3927 sinsn = prev_nonnote_insn (sinsn))
3929 set = single_set (sinsn);
3930 if (set != 0
3931 && SET_DEST (set) == regno_reg_rtx [regnoi])
3932 REG_NOTES (sinsn)
3933 = gen_rtx (EXPR_LIST, REG_EQUIV,
3934 parm_reg_stack_loc[regnoi],
3935 REG_NOTES (sinsn));
3936 else if (set != 0
3937 && SET_DEST (set) == regno_reg_rtx [regnor])
3938 REG_NOTES (sinsn)
3939 = gen_rtx (EXPR_LIST, REG_EQUIV,
3940 parm_reg_stack_loc[regnor],
3941 REG_NOTES (sinsn));
3943 else if ((set = single_set (linsn)) != 0
3944 && SET_DEST (set) == parmreg)
3945 REG_NOTES (linsn)
3946 = gen_rtx (EXPR_LIST, REG_EQUIV,
3947 entry_parm, REG_NOTES (linsn));
3950 /* For pointer data type, suggest pointer register. */
3951 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3952 mark_reg_pointer (parmreg,
3953 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
3954 / BITS_PER_UNIT));
3956 else
3958 /* Value must be stored in the stack slot STACK_PARM
3959 during function execution. */
3961 if (promoted_mode != nominal_mode)
3963 /* Conversion is required. */
3964 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3966 emit_move_insn (tempreg, validize_mem (entry_parm));
3968 push_to_sequence (conversion_insns);
3969 entry_parm = convert_to_mode (nominal_mode, tempreg,
3970 TREE_UNSIGNED (TREE_TYPE (parm)));
3971 conversion_insns = get_insns ();
3972 did_conversion = 1;
3973 end_sequence ();
3976 if (entry_parm != stack_parm)
3978 if (stack_parm == 0)
3980 stack_parm
3981 = assign_stack_local (GET_MODE (entry_parm),
3982 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3983 /* If this is a memory ref that contains aggregate components,
3984 mark it as such for cse and loop optimize. */
3985 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3988 if (promoted_mode != nominal_mode)
3990 push_to_sequence (conversion_insns);
3991 emit_move_insn (validize_mem (stack_parm),
3992 validize_mem (entry_parm));
3993 conversion_insns = get_insns ();
3994 end_sequence ();
3996 else
3997 emit_move_insn (validize_mem (stack_parm),
3998 validize_mem (entry_parm));
4001 DECL_RTL (parm) = stack_parm;
4004 /* If this "parameter" was the place where we are receiving the
4005 function's incoming structure pointer, set up the result. */
4006 if (parm == function_result_decl)
4008 tree result = DECL_RESULT (fndecl);
4009 tree restype = TREE_TYPE (result);
4011 DECL_RTL (result)
4012 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
4014 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4017 if (TREE_THIS_VOLATILE (parm))
4018 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4019 if (TREE_READONLY (parm))
4020 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4023 /* Output all parameter conversion instructions (possibly including calls)
4024 now that all parameters have been copied out of hard registers. */
4025 emit_insns (conversion_insns);
4027 max_parm_reg = max_reg_num ();
4028 last_parm_insn = get_last_insn ();
4030 current_function_args_size = stack_args_size.constant;
4032 /* Adjust function incoming argument size for alignment and
4033 minimum length. */
4035 #ifdef REG_PARM_STACK_SPACE
4036 #ifndef MAYBE_REG_PARM_STACK_SPACE
4037 current_function_args_size = MAX (current_function_args_size,
4038 REG_PARM_STACK_SPACE (fndecl));
4039 #endif
4040 #endif
4042 #ifdef STACK_BOUNDARY
4043 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4045 current_function_args_size
4046 = ((current_function_args_size + STACK_BYTES - 1)
4047 / STACK_BYTES) * STACK_BYTES;
4048 #endif
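/* Illustrative only: with STACK_BOUNDARY == 64, STACK_BYTES is 8, so
   an args size of 20 bytes rounds up to ((20 + 7) / 8) * 8 == 24. */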
4050 #ifdef ARGS_GROW_DOWNWARD
4051 current_function_arg_offset_rtx
4052 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4053 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4054 size_int (-stack_args_size.constant)),
4055 NULL_RTX, VOIDmode, 0));
4056 #else
4057 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4058 #endif
4060 /* See how many bytes, if any, of its args a function should try to pop
4061 on return. */
4063 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4064 current_function_args_size);
4066 /* For a stdarg.h function, save info about
4067 regs and stack space used by the named args. */
4069 if (!hide_last_arg)
4070 current_function_args_info = args_so_far;
4072 /* Set the rtx used for the function return value. Put this in its
4073 own variable so any optimizers that need this information don't have
4074 to include tree.h. Do this here so it gets done when an inlined
4075 function gets output. */
4077 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4080 /* Indicate whether REGNO is an incoming argument to the current function
4081 that was promoted to a wider mode. If so, return the RTX for the
4082 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4083 that REGNO is promoted from and whether the promotion was signed or
4084 unsigned. */
4086 #ifdef PROMOTE_FUNCTION_ARGS
4088 rtx
4089 promoted_input_arg (regno, pmode, punsignedp)
4090 int regno;
4091 enum machine_mode *pmode;
4092 int *punsignedp;
4094 tree arg;
4096 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4097 arg = TREE_CHAIN (arg))
4098 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4099 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4100 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4102 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4103 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4105 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4106 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4107 && mode != DECL_MODE (arg))
4109 *pmode = DECL_MODE (arg);
4110 *punsignedp = unsignedp;
4111 return DECL_INCOMING_RTL (arg);
4115 return 0;
4118 #endif
4120 /* Compute the size and offset from the start of the stacked arguments for a
4121 parm passed in mode PASSED_MODE and with type TYPE.
4123 INITIAL_OFFSET_PTR points to the current offset into the stacked
4124 arguments.
4126 The starting offset and size for this parm are returned in *OFFSET_PTR
4127 and *ARG_SIZE_PTR, respectively.
4129 IN_REGS is non-zero if the argument will be passed in registers. It will
4130 never be set if REG_PARM_STACK_SPACE is not defined.
4132 FNDECL is the function in which the argument was defined.
4134 There are two types of rounding that are done. The first, controlled by
4135 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4136 list to be aligned to the specific boundary (in bits). This rounding
4137 affects the initial and starting offsets, but not the argument size.
4139 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4140 optionally rounds the size of the parm to PARM_BOUNDARY. The
4141 initial offset is not affected by this rounding, while the size always
4142 is and the starting offset may be. */
4144 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4145 initial_offset_ptr is positive because locate_and_pad_parm's
4146 callers pass in the total size of the args so far as
4147 initial_offset_ptr. arg_size_ptr is always positive. */
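/* Worked example with assumed values, for illustration: suppose the
   offset so far is 4, FUNCTION_ARG_BOUNDARY gives 64 bits (8 bytes),
   and the parm itself is 4 bytes. The first rounding moves the
   starting offset up to CEIL_ROUND (4, 8) == 8; the second, when
   FUNCTION_ARG_PADDING is not `none', rounds the 4-byte size up to
   PARM_BOUNDARY, say 64 bits, so the next parm starts at offset 16. */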
4149 void
4150 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4151 initial_offset_ptr, offset_ptr, arg_size_ptr)
4152 enum machine_mode passed_mode;
4153 tree type;
4154 int in_regs;
4155 tree fndecl;
4156 struct args_size *initial_offset_ptr;
4157 struct args_size *offset_ptr;
4158 struct args_size *arg_size_ptr;
4160 tree sizetree
4161 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4162 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4163 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4164 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4165 int reg_parm_stack_space = 0;
4167 #ifdef REG_PARM_STACK_SPACE
4168 /* If we have found a stack parm before we reach the end of the
4169 area reserved for registers, skip that area. */
4170 if (! in_regs)
4172 #ifdef MAYBE_REG_PARM_STACK_SPACE
4173 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4174 #else
4175 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4176 #endif
4177 if (reg_parm_stack_space > 0)
4179 if (initial_offset_ptr->var)
4181 initial_offset_ptr->var
4182 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4183 size_int (reg_parm_stack_space));
4184 initial_offset_ptr->constant = 0;
4186 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4187 initial_offset_ptr->constant = reg_parm_stack_space;
4190 #endif /* REG_PARM_STACK_SPACE */
4192 arg_size_ptr->var = 0;
4193 arg_size_ptr->constant = 0;
4195 #ifdef ARGS_GROW_DOWNWARD
4196 if (initial_offset_ptr->var)
4198 offset_ptr->constant = 0;
4199 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4200 initial_offset_ptr->var);
4202 else
4204 offset_ptr->constant = - initial_offset_ptr->constant;
4205 offset_ptr->var = 0;
4207 if (where_pad != none
4208 && (TREE_CODE (sizetree) != INTEGER_CST
4209 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4210 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4211 SUB_PARM_SIZE (*offset_ptr, sizetree);
4212 if (where_pad != downward)
4213 pad_to_arg_alignment (offset_ptr, boundary);
4214 if (initial_offset_ptr->var)
4216 arg_size_ptr->var = size_binop (MINUS_EXPR,
4217 size_binop (MINUS_EXPR,
4218 integer_zero_node,
4219 initial_offset_ptr->var),
4220 offset_ptr->var);
4222 else
4224 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4225 offset_ptr->constant);
4227 #else /* !ARGS_GROW_DOWNWARD */
4228 pad_to_arg_alignment (initial_offset_ptr, boundary);
4229 *offset_ptr = *initial_offset_ptr;
4231 #ifdef PUSH_ROUNDING
4232 if (passed_mode != BLKmode)
4233 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4234 #endif
4236 /* pad_below needs the pre-rounded size to know how much to pad below,
4237 so this must be done before rounding up. */
4238 if (where_pad == downward
4239 /* However, BLKmode args passed in regs have their padding done elsewhere.
4240 The stack slot must be able to hold the entire register. */
4241 && !(in_regs && passed_mode == BLKmode))
4242 pad_below (offset_ptr, passed_mode, sizetree);
4244 if (where_pad != none
4245 && (TREE_CODE (sizetree) != INTEGER_CST
4246 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4247 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4249 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4250 #endif /* ARGS_GROW_DOWNWARD */
4253 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4254 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
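/* Illustrative only, assuming BOUNDARY == 64 bits (8 bytes): an offset
   of 20 becomes CEIL_ROUND (20, 8) == 24 when args grow upward, and a
   (negative) offset of -20 becomes FLOOR_ROUND (-20, 8) == -24 when
   ARGS_GROW_DOWNWARD. */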
4256 static void
4257 pad_to_arg_alignment (offset_ptr, boundary)
4258 struct args_size *offset_ptr;
4259 int boundary;
4261 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4263 if (boundary > BITS_PER_UNIT)
4265 if (offset_ptr->var)
4267 offset_ptr->var =
4268 #ifdef ARGS_GROW_DOWNWARD
4269 round_down
4270 #else
4271 round_up
4272 #endif
4273 (ARGS_SIZE_TREE (*offset_ptr),
4274 boundary / BITS_PER_UNIT);
4275 offset_ptr->constant = 0; /*?*/
4277 else
4278 offset_ptr->constant =
4279 #ifdef ARGS_GROW_DOWNWARD
4280 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4281 #else
4282 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4283 #endif
4287 static void
4288 pad_below (offset_ptr, passed_mode, sizetree)
4289 struct args_size *offset_ptr;
4290 enum machine_mode passed_mode;
4291 tree sizetree;
4293 if (passed_mode != BLKmode)
4295 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4296 offset_ptr->constant
4297 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4298 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4299 - GET_MODE_SIZE (passed_mode));
4301 else
4303 if (TREE_CODE (sizetree) != INTEGER_CST
4304 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4306 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
4307 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4308 /* Add it in. */
4309 ADD_PARM_SIZE (*offset_ptr, s2);
4310 SUB_PARM_SIZE (*offset_ptr, sizetree);
4315 static tree
4316 round_down (value, divisor)
4317 tree value;
4318 int divisor;
4320 return size_binop (MULT_EXPR,
4321 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4322 size_int (divisor));
4325 /* Walk the tree of blocks describing the binding levels within a function
4326 and warn about uninitialized variables.
4327 This is done after calling flow_analysis and before global_alloc
4328 clobbers the pseudo-regs to hard regs. */
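/* For instance (illustrative only), flow analysis would flag `i' in

       int f () { int i; return i; }

   because the pseudo-reg holding `i' is read before any set of it. */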
4330 void
4331 uninitialized_vars_warning (block)
4332 tree block;
4334 register tree decl, sub;
4335 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4337 if (TREE_CODE (decl) == VAR_DECL
4338 /* These warnings are unreliable for aggregates
4339 because assigning the fields one by one can fail to convince
4340 flow.c that the entire aggregate was initialized.
4341 Unions are troublesome because members may be shorter. */
4342 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4343 && DECL_RTL (decl) != 0
4344 && GET_CODE (DECL_RTL (decl)) == REG
4345 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4346 warning_with_decl (decl,
4347 "`%s' might be used uninitialized in this function");
4348 if (TREE_CODE (decl) == VAR_DECL
4349 && DECL_RTL (decl) != 0
4350 && GET_CODE (DECL_RTL (decl)) == REG
4351 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4352 warning_with_decl (decl,
4353 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4355 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4356 uninitialized_vars_warning (sub);
4359 /* Do the appropriate part of uninitialized_vars_warning
4360 but for arguments instead of local variables. */
4362 void
4363 setjmp_args_warning ()
4365 register tree decl;
4366 for (decl = DECL_ARGUMENTS (current_function_decl);
4367 decl; decl = TREE_CHAIN (decl))
4368 if (DECL_RTL (decl) != 0
4369 && GET_CODE (DECL_RTL (decl)) == REG
4370 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4371 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4374 /* If this function calls setjmp, put all vars into the stack
4375 unless they were declared `register'. */
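/* Illustrative only: given

       jmp_buf env;
       int f () { int i = 0; if (setjmp (env)) return i; ... }

   `i' must live on the stack, since a copy kept in a register could be
   lost when longjmp restores (or fails to restore) the registers. */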
4377 void
4378 setjmp_protect (block)
4379 tree block;
4381 register tree decl, sub;
4382 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4383 if ((TREE_CODE (decl) == VAR_DECL
4384 || TREE_CODE (decl) == PARM_DECL)
4385 && DECL_RTL (decl) != 0
4386 && GET_CODE (DECL_RTL (decl)) == REG
4387 /* If this variable came from an inline function, it must be
4388 that its life doesn't overlap the setjmp. If there was a
4389 setjmp in the function, it would already be in memory. We
4390 must exclude such variables because their DECL_RTL might be
4391 set to strange things such as virtual_stack_vars_rtx. */
4392 && ! DECL_FROM_INLINE (decl)
4393 && (
4394 #ifdef NON_SAVING_SETJMP
4395 /* If longjmp doesn't restore the registers,
4396 don't put anything in them. */
4397 NON_SAVING_SETJMP
4398 ||
4399 #endif
4400 ! DECL_REGISTER (decl)))
4401 put_var_into_stack (decl);
4402 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4403 setjmp_protect (sub);
4406 /* Like the previous function, but for args instead of local variables. */
4408 void
4409 setjmp_protect_args ()
4411 register tree decl, sub;
4412 for (decl = DECL_ARGUMENTS (current_function_decl);
4413 decl; decl = TREE_CHAIN (decl))
4414 if ((TREE_CODE (decl) == VAR_DECL
4415 || TREE_CODE (decl) == PARM_DECL)
4416 && DECL_RTL (decl) != 0
4417 && GET_CODE (DECL_RTL (decl)) == REG
4418 && (
4419 /* If longjmp doesn't restore the registers,
4420 don't put anything in them. */
4421 #ifdef NON_SAVING_SETJMP
4422 NON_SAVING_SETJMP
4423 ||
4424 #endif
4425 ! DECL_REGISTER (decl)))
4426 put_var_into_stack (decl);
4429 /* Return the context-pointer register corresponding to DECL,
4430 or 0 if it does not need one. */
4432 rtx
4433 lookup_static_chain (decl)
4434 tree decl;
4436 tree context = decl_function_context (decl);
4437 tree link;
4439 if (context == 0
4440 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4441 return 0;
4443 /* We treat inline_function_decl as an alias for the current function
4444 because that is the inline function whose vars, types, etc.
4445 are being merged into the current function.
4446 See expand_inline_function. */
4447 if (context == current_function_decl || context == inline_function_decl)
4448 return virtual_stack_vars_rtx;
4450 for (link = context_display; link; link = TREE_CHAIN (link))
4451 if (TREE_PURPOSE (link) == context)
4452 return RTL_EXPR_RTL (TREE_VALUE (link));
4454 abort ();
4457 /* Convert a stack slot address ADDR for variable VAR
4458 (from a containing function)
4459 into an address valid in this function (using a static chain). */
4461 rtx
4462 fix_lexical_addr (addr, var)
4463 rtx addr;
4464 tree var;
4466 rtx basereg;
4467 int displacement;
4468 tree context = decl_function_context (var);
4469 struct function *fp;
4470 rtx base = 0;
4472 /* If this is the present function, we need not do anything. */
4473 if (context == current_function_decl || context == inline_function_decl)
4474 return addr;
4476 for (fp = outer_function_chain; fp; fp = fp->next)
4477 if (fp->decl == context)
4478 break;
4480 if (fp == 0)
4481 abort ();
4483 /* Decode given address as base reg plus displacement. */
4484 if (GET_CODE (addr) == REG)
4485 basereg = addr, displacement = 0;
4486 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4487 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4488 else
4489 abort ();
4491 /* We accept vars reached via the containing function's
4492 incoming arg pointer and via its stack variables pointer. */
4493 if (basereg == fp->internal_arg_pointer)
4495 /* If reached via arg pointer, get the arg pointer value
4496 out of that function's stack frame.
4498 There are two cases: If a separate ap is needed, allocate a
4499 slot in the outer function for it and dereference it that way.
4500 This is correct even if the real ap is actually a pseudo.
4501 Otherwise, just adjust the offset from the frame pointer to
4502 compensate. */
4504 #ifdef NEED_SEPARATE_AP
4505 rtx addr;
4507 if (fp->arg_pointer_save_area == 0)
4508 fp->arg_pointer_save_area
4509 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4511 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4512 addr = memory_address (Pmode, addr);
4514 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4515 #else
4516 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4517 base = lookup_static_chain (var);
4518 #endif
4521 else if (basereg == virtual_stack_vars_rtx)
4523 /* This is the same code as lookup_static_chain, duplicated here to
4524 avoid an extra call to decl_function_context. */
4525 tree link;
4527 for (link = context_display; link; link = TREE_CHAIN (link))
4528 if (TREE_PURPOSE (link) == context)
4530 base = RTL_EXPR_RTL (TREE_VALUE (link));
4531 break;
4535 if (base == 0)
4536 abort ();
4538 /* Use same offset, relative to appropriate static chain or argument
4539 pointer. */
4540 return plus_constant (base, displacement);
4543 /* Return the address of the trampoline for entering nested fn FUNCTION.
4544 If necessary, allocate a trampoline (in the stack frame)
4545 and emit rtl to initialize its contents (at entry to this function). */
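/* Illustrative only: in GNU C,

       int f (int x)
       {
         int g (int y) { return x + y; }
         return apply (g);
       }

   passing g's address (apply is just a hypothetical callee) requires a
   trampoline that loads f's static chain before jumping to g's actual
   code. */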
4547 rtx
4548 trampoline_address (function)
4549 tree function;
4551 tree link;
4552 tree rtlexp;
4553 rtx tramp;
4554 struct function *fp;
4555 tree fn_context;
4557 /* Find an existing trampoline and return it. */
4558 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4559 if (TREE_PURPOSE (link) == function)
4560 return
4561 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4563 for (fp = outer_function_chain; fp; fp = fp->next)
4564 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4565 if (TREE_PURPOSE (link) == function)
4567 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4568 function);
4569 return round_trampoline_addr (tramp);
4572 /* None exists; we must make one. */
4574 /* Find the `struct function' for the function containing FUNCTION. */
4575 fp = 0;
4576 fn_context = decl_function_context (function);
4577 if (fn_context != current_function_decl)
4578 for (fp = outer_function_chain; fp; fp = fp->next)
4579 if (fp->decl == fn_context)
4580 break;
4582 /* Allocate run-time space for this trampoline
4583 (usually in the defining function's stack frame). */
4584 #ifdef ALLOCATE_TRAMPOLINE
4585 tramp = ALLOCATE_TRAMPOLINE (fp);
4586 #else
4587 /* If rounding is needed, allocate extra space
4588 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4589 #ifdef TRAMPOLINE_ALIGNMENT
4590 #define TRAMPOLINE_REAL_SIZE \
4591 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4592 #else
4593 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4594 #endif
4595 if (fp != 0)
4596 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4597 else
4598 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4599 #endif
4601 /* Record the trampoline for reuse and note it for later initialization
4602 by expand_function_end. */
4603 if (fp != 0)
4605 push_obstacks (fp->function_maybepermanent_obstack,
4606 fp->function_maybepermanent_obstack);
4607 rtlexp = make_node (RTL_EXPR);
4608 RTL_EXPR_RTL (rtlexp) = tramp;
4609 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4610 pop_obstacks ();
4612 else
4614 /* Make the RTL_EXPR node temporary, not momentary, so that the
4615 trampoline_list doesn't become garbage. */
4616 int momentary = suspend_momentary ();
4617 rtlexp = make_node (RTL_EXPR);
4618 resume_momentary (momentary);
4620 RTL_EXPR_RTL (rtlexp) = tramp;
4621 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4624 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4625 return round_trampoline_addr (tramp);
4628 /* Given a trampoline address,
4629 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
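/* Illustrative arithmetic, assuming TRAMPOLINE_ALIGNMENT == 64 bits
   (8 bytes): an address of 0x1003 becomes (0x1003 + 7) & -8 == 0x1008,
   which is exactly what the add/and pair below computes. */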
4631 static rtx
4632 round_trampoline_addr (tramp)
4633 rtx tramp;
4635 #ifdef TRAMPOLINE_ALIGNMENT
4636 /* Round address up to desired boundary. */
4637 rtx temp = gen_reg_rtx (Pmode);
4638 temp = expand_binop (Pmode, add_optab, tramp,
4639 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4640 temp, 0, OPTAB_LIB_WIDEN);
4641 tramp = expand_binop (Pmode, and_optab, temp,
4642 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4643 temp, 0, OPTAB_LIB_WIDEN);
4644 #endif
4645 return tramp;
4648 /* The functions identify_blocks and reorder_blocks provide a way to
4649 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4650 duplicate portions of the RTL code. Call identify_blocks before
4651 changing the RTL, and call reorder_blocks after. */
4653 /* Put all this function's BLOCK nodes, including those that are chained
4654 onto the first block, into a vector, and return it.
4655 Also store in each NOTE for the beginning or end of a block
4656 the index of that block in the vector.
4657 The arguments are BLOCK, the chain of top-level blocks of the function,
4658 and INSNS, the insn chain of the function. */
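/* A typical caller looks roughly like this (a sketch, not code from
   this file):

       tree *vec = identify_blocks (block, insns);
       ... reshuffle or duplicate parts of the insn chain ...
       block = reorder_blocks (vec, block, insns);
       free (vec);

   where the vector came from xmalloc in identify_blocks. */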
4660 tree *
4661 identify_blocks (block, insns)
4662 tree block;
4663 rtx insns;
4665 int n_blocks;
4666 tree *block_vector;
4667 int *block_stack;
4668 int depth = 0;
4669 int next_block_number = 1;
4670 int current_block_number = 1;
4671 rtx insn;
4673 if (block == 0)
4674 return 0;
4676 n_blocks = all_blocks (block, 0);
4677 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4678 block_stack = (int *) alloca (n_blocks * sizeof (int));
4680 all_blocks (block, block_vector);
4682 for (insn = insns; insn; insn = NEXT_INSN (insn))
4683 if (GET_CODE (insn) == NOTE)
4685 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4687 block_stack[depth++] = current_block_number;
4688 current_block_number = next_block_number;
4689 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4691 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4693 current_block_number = block_stack[--depth];
4694 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4698 if (n_blocks != next_block_number)
4699 abort ();
4701 return block_vector;
4704 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4705 and a revised instruction chain, rebuild the tree structure
4706 of BLOCK nodes to correspond to the new order of RTL.
4707 The new block tree is inserted below BLOCK.
4708 Returns the current top-level block. */
4710 tree
4711 reorder_blocks (block_vector, block, insns)
4712 tree *block_vector;
4713 tree block;
4714 rtx insns;
4716 tree current_block = block;
4717 rtx insn;
4719 if (block_vector == 0)
4720 return block;
4722 /* Prune the old trees away, so that they don't get in the way. */
4723 BLOCK_SUBBLOCKS (current_block) = 0;
4724 BLOCK_CHAIN (current_block) = 0;
4726 for (insn = insns; insn; insn = NEXT_INSN (insn))
4727 if (GET_CODE (insn) == NOTE)
4729 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4731 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4732 /* If we have seen this block before, copy it. */
4733 if (TREE_ASM_WRITTEN (block))
4734 block = copy_node (block);
4735 BLOCK_SUBBLOCKS (block) = 0;
4736 TREE_ASM_WRITTEN (block) = 1;
4737 BLOCK_SUPERCONTEXT (block) = current_block;
4738 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4739 BLOCK_SUBBLOCKS (current_block) = block;
4740 current_block = block;
4741 NOTE_SOURCE_FILE (insn) = 0;
4743 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4745 BLOCK_SUBBLOCKS (current_block)
4746 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4747 current_block = BLOCK_SUPERCONTEXT (current_block);
4748 NOTE_SOURCE_FILE (insn) = 0;
4752 BLOCK_SUBBLOCKS (current_block)
4753 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4754 return current_block;
4757 /* Reverse the order of elements in the chain T of blocks,
4758 and return the new head of the chain (old last element). */
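/* Illustrative only: a chain A -> B -> C (linked through BLOCK_CHAIN)
   comes back as C -> B -> A, and C is the value returned. */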
4760 static tree
4761 blocks_nreverse (t)
4762 tree t;
4764 register tree prev = 0, decl, next;
4765 for (decl = t; decl; decl = next)
4767 next = BLOCK_CHAIN (decl);
4768 BLOCK_CHAIN (decl) = prev;
4769 prev = decl;
4771 return prev;
4774 /* Count the subblocks of the list starting with BLOCK, and store them
4775 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4776 blocks. */
4778 static int
4779 all_blocks (block, vector)
4780 tree block;
4781 tree *vector;
4783 int n_blocks = 0;
4785 while (block)
4787 TREE_ASM_WRITTEN (block) = 0;
4789 /* Record this block. */
4790 if (vector)
4791 vector[n_blocks] = block;
4793 ++n_blocks;
4795 /* Record the subblocks, and their subblocks... */
4796 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4797 vector ? vector + n_blocks : 0);
4798 block = BLOCK_CHAIN (block);
4801 return n_blocks;
4804 /* Build bytecode call descriptor for function SUBR. */
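/* Judging from the code below (a sketch, not an authoritative layout),
   the descriptor comes out roughly as

       { nargs, return type code, return size,
         arg1 type code, arg1 size, arg2 type code, arg2 size, ... }

   with the per-argument pairs in declaration order. */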
4806 rtx
4807 bc_build_calldesc (subr)
4808 tree subr;
4810 tree calldesc = 0, arg;
4811 int nargs = 0;
4813 /* Build the argument description vector in reverse order. */
4814 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4815 nargs = 0;
4817 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4819 ++nargs;
4821 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4822 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4825 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4827 /* Prepend the function's return type. */
4828 calldesc = tree_cons ((tree) 0,
4829 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4830 calldesc);
4832 calldesc = tree_cons ((tree) 0,
4833 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4834 calldesc);
4836 /* Prepend the arg count. */
4837 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4839 /* Output the call description vector and get its address. */
4840 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4841 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4842 build_index_type (build_int_2 (nargs * 2, 0)));
4844 return output_constant_def (calldesc);
4848 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4849 and initialize static variables for generating RTL for the statements
4850 of the function. */
4852 void
4853 init_function_start (subr, filename, line)
4854 tree subr;
4855 char *filename;
4856 int line;
4858 char *junk;
4860 if (output_bytecode)
4862 this_function_decl = subr;
4863 this_function_calldesc = bc_build_calldesc (subr);
4864 local_vars_size = 0;
4865 stack_depth = 0;
4866 max_stack_depth = 0;
4867 stmt_expr_depth = 0;
4868 return;
4871 init_stmt_for_function ();
4873 cse_not_expected = ! optimize;
4875 /* Caller save not needed yet. */
4876 caller_save_needed = 0;
4878 /* No stack slots have been made yet. */
4879 stack_slot_list = 0;
4881 /* There is no stack slot for handling nonlocal gotos. */
4882 nonlocal_goto_handler_slot = 0;
4883 nonlocal_goto_stack_level = 0;
4885 /* No labels have been declared for nonlocal use. */
4886 nonlocal_labels = 0;
4888 /* No function calls so far in this function. */
4889 function_call_count = 0;
4891 /* No parm regs have been allocated.
4892 (This is important for output_inline_function.) */
4893 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4895 /* Initialize the RTL mechanism. */
4896 init_emit ();
4898 /* Initialize the queue of pending postincrements and postdecrements,
4899 and some other info in expr.c. */
4900 init_expr ();
4902 /* We haven't done register allocation yet. */
4903 reg_renumber = 0;
4905 init_const_rtx_hash_table ();
4907 current_function_name = (*decl_printable_name) (subr, &junk);
4909 /* Nonzero if this is a nested function that uses a static chain. */
4911 current_function_needs_context
4912 = (decl_function_context (current_function_decl) != 0
4913 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4915 /* Set if a call to setjmp is seen. */
4916 current_function_calls_setjmp = 0;
4918 /* Set if a call to longjmp is seen. */
4919 current_function_calls_longjmp = 0;
4921 current_function_calls_alloca = 0;
4922 current_function_has_nonlocal_label = 0;
4923 current_function_has_nonlocal_goto = 0;
4924 current_function_contains_functions = 0;
4926 current_function_returns_pcc_struct = 0;
4927 current_function_returns_struct = 0;
4928 current_function_epilogue_delay_list = 0;
4929 current_function_uses_const_pool = 0;
4930 current_function_uses_pic_offset_table = 0;
4932 /* We have not yet needed to make a label to jump to for tail-recursion. */
4933 tail_recursion_label = 0;
4935 /* We haven't had a need to make a save area for ap yet. */
4937 arg_pointer_save_area = 0;
4939 /* No stack slots allocated yet. */
4940 frame_offset = 0;
4942 /* No SAVE_EXPRs in this function yet. */
4943 save_expr_regs = 0;
4945 /* No RTL_EXPRs in this function yet. */
4946 rtl_expr_chain = 0;
4948 /* Set up to allocate temporaries. */
4949 init_temp_slots ();
4951 /* Within function body, compute a type's size as soon as it is laid out. */
4952 immediate_size_expand++;
4954 /* We haven't made any trampolines for this function yet. */
4955 trampoline_list = 0;
4957 init_pending_stack_adjust ();
4958 inhibit_defer_pop = 0;
4960 current_function_outgoing_args_size = 0;
4962 /* Prevent ever trying to delete the first instruction of a function.
4963 Also tell final how to output a linenum before the function prologue. */
4964 emit_line_note (filename, line);
4966 /* Make sure first insn is a note even if we don't want linenums.
4967 This makes sure the first insn will never be deleted.
4968 Also, final expects a note to appear there. */
4969 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4971 /* Set flags used by final.c. */
4972 if (aggregate_value_p (DECL_RESULT (subr)))
4974 #ifdef PCC_STATIC_STRUCT_RETURN
4975 current_function_returns_pcc_struct = 1;
4976 #endif
4977 current_function_returns_struct = 1;
4980 /* Warn if this value is an aggregate type,
4981 regardless of which calling convention we are using for it. */
4982 if (warn_aggregate_return
4983 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4984 warning ("function returns an aggregate");
4986 current_function_returns_pointer
4987 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
4989 /* Indicate that we need to distinguish between the return value of the
4990 present function and the return value of a function being called. */
4991 rtx_equal_function_value_matters = 1;
4993 /* Indicate that we have not instantiated virtual registers yet. */
4994 virtuals_instantiated = 0;
4996 /* Indicate we have no need of a frame pointer yet. */
4997 frame_pointer_needed = 0;
4999 /* By default assume not varargs or stdarg. */
5000 current_function_varargs = 0;
5001 current_function_stdarg = 0;
5004 /* Indicate that the current function uses extra args
5005 not explicitly mentioned in the argument list in any fashion. */
5007 void
5008 mark_varargs ()
5010 current_function_varargs = 1;
5013 /* Expand a call to __main at the beginning of a possible main function. */
5015 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5016 #undef HAS_INIT_SECTION
5017 #define HAS_INIT_SECTION
5018 #endif
5020 void
5021 expand_main_function ()
5023 if (!output_bytecode)
5025 /* The zero below avoids a possible parse error */
5027 #if !defined (HAS_INIT_SECTION)
5028 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
5029 VOIDmode, 0);
5030 #endif /* not HAS_INIT_SECTION */
5034 extern struct obstack permanent_obstack;
5036 /* Expand start of bytecode function. See comment at
5037 expand_function_start below for details. */
5039 void
5040 bc_expand_function_start (subr, parms_have_cleanups)
5041 tree subr;
5042 int parms_have_cleanups;
5044 char label[20], *name;
5045 static int nlab;
5046 tree thisarg;
5047 int argsz;
5049 if (TREE_PUBLIC (subr))
5050 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
5052 #ifdef DEBUG_PRINT_CODE
5053 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
5054 #endif
5056 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5058 if (DECL_RTL (thisarg))
5059 abort (); /* Should be NULL here I think. */
5060 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5062 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5063 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5065 else
5067 /* Variable-sized objects are pointers to their storage. */
5068 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5069 argsz += POINTER_SIZE;
5073 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5075 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5077 ++nlab;
5078 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5079 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5080 this_function_bytecode =
5081 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5085 /* Expand end of bytecode function. For details, see the comment at
5086 expand_function_end (), below. */
5088 void
5089 bc_expand_function_end ()
5091 char *ptrconsts;
5093 expand_null_return ();
5095 /* Emit any fixup code. This must be done before the call to
5096 BC_END_FUNCTION (), since that will cause the bytecode
5097 segment to be finished off and closed. */
5099 expand_fixups (NULL_RTX);
5101 ptrconsts = bc_end_function ();
5103 bc_align_const (2 /* INT_ALIGN */);
5105 /* If this changes also make sure to change bc-interp.h! */
5107 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5108 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5109 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5110 bc_emit_const_labelref (this_function_bytecode, 0);
5111 bc_emit_const_labelref (ptrconsts, 0);
5112 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5116 /* Start the RTL for a new function, and set variables used for
5117 emitting RTL.
5118 SUBR is the FUNCTION_DECL node.
5119 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5120 the function's parameters, which must be run at any return statement. */
5122 void
5123 expand_function_start (subr, parms_have_cleanups)
5124 tree subr;
5125 int parms_have_cleanups;
5127 register int i;
5128 tree tem;
5129 rtx last_ptr;
5131 if (output_bytecode)
5133 bc_expand_function_start (subr, parms_have_cleanups);
5134 return;
5137 /* Make sure volatile mem refs aren't considered
5138 valid operands of arithmetic insns. */
5139 init_recog_no_volatile ();
5141 /* If function gets a static chain arg, store it in the stack frame.
5142 Do this first, so it gets the first stack slot offset. */
5143 if (current_function_needs_context)
5145 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5147 #ifdef SMALL_REGISTER_CLASSES
5148 /* Delay copying static chain if it is not a register to avoid
5149 conflicts with regs used for parameters. */
5150 if (GET_CODE (static_chain_incoming_rtx) == REG)
5151 #endif
5152 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5155 /* If the parameters of this function need cleaning up, get a label
5156 for the beginning of the code which executes those cleanups. This must
5157 be done before doing anything with return_label. */
5158 if (parms_have_cleanups)
5159 cleanup_label = gen_label_rtx ();
5160 else
5161 cleanup_label = 0;
5163 /* Make the label for return statements to jump to, if this machine
5164 does not have a one-instruction return and uses an epilogue,
5165 or if it returns a structure, or if it has parm cleanups. */
5166 #ifdef HAVE_return
5167 if (cleanup_label == 0 && HAVE_return
5168 && ! current_function_returns_pcc_struct
5169 && ! (current_function_returns_struct && ! optimize))
5170 return_label = 0;
5171 else
5172 return_label = gen_label_rtx ();
5173 #else
5174 return_label = gen_label_rtx ();
5175 #endif
5177 /* Initialize rtx used to return the value. */
5178 /* Do this before assign_parms so that we copy the struct value address
5179 before any library calls that assign parms might generate. */
5181 /* Decide whether to return the value in memory or in a register. */
5182 if (aggregate_value_p (DECL_RESULT (subr)))
5184 /* Returning something that won't go in a register. */
5185 register rtx value_address = 0;
5187 #ifdef PCC_STATIC_STRUCT_RETURN
5188 if (current_function_returns_pcc_struct)
5190 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5191 value_address = assemble_static_space (size);
5193 else
5194 #endif
5196 /* Expect to be passed the address of a place to store the value.
5197 If it is passed as an argument, assign_parms will take care of
5198 it. */
5199 if (struct_value_incoming_rtx)
5201 value_address = gen_reg_rtx (Pmode);
5202 emit_move_insn (value_address, struct_value_incoming_rtx);
5205 if (value_address)
5207 DECL_RTL (DECL_RESULT (subr))
5208 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
5209 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5210 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5213 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5214 /* If return mode is void, this decl rtl should not be used. */
5215 DECL_RTL (DECL_RESULT (subr)) = 0;
5216 else if (parms_have_cleanups)
5218 /* If function will end with cleanup code for parms,
5219 compute the return values into a pseudo reg,
5220 which we will copy into the true return register
5221 after the cleanups are done. */
5223 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5225 #ifdef PROMOTE_FUNCTION_RETURN
5226 tree type = TREE_TYPE (DECL_RESULT (subr));
5227 int unsignedp = TREE_UNSIGNED (type);
5229 mode = promote_mode (type, mode, &unsignedp, 1);
5230 #endif
5232 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5234 else
5235 /* Scalar, returned in a register. */
5237 #ifdef FUNCTION_OUTGOING_VALUE
5238 DECL_RTL (DECL_RESULT (subr))
5239 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5240 #else
5241 DECL_RTL (DECL_RESULT (subr))
5242 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5243 #endif
5245 /* Mark this reg as the function's return value. */
5246 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5248 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5249 /* Needed because we may need to move this to memory
5250 in case it's a named return value whose address is taken. */
5251 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5255 /* Initialize rtx for parameters and local variables.
5256 In some cases this requires emitting insns. */
5258 assign_parms (subr, 0);
5260 #ifdef SMALL_REGISTER_CLASSES
5261 /* Copy the static chain now if it wasn't a register. The delay is to
5262 avoid conflicts with the parameter passing registers. */
5264 if (current_function_needs_context)
5265 if (GET_CODE (static_chain_incoming_rtx) != REG)
5266 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5267 #endif
5269 /* The following was moved from init_function_start.
5270 The move is supposed to make sdb output more accurate. */
5271 /* Indicate the beginning of the function body,
5272 as opposed to parm setup. */
5273 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5275 /* If doing stupid allocation, mark parms as born here. */
5277 if (GET_CODE (get_last_insn ()) != NOTE)
5278 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5279 parm_birth_insn = get_last_insn ();
5281 if (obey_regdecls)
5283 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5284 use_variable (regno_reg_rtx[i]);
5286 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5287 use_variable (current_function_internal_arg_pointer);
5290 context_display = 0;
5291 if (current_function_needs_context)
5293 /* Fetch static chain values for containing functions. */
5294 tem = decl_function_context (current_function_decl);
5295 /* If not doing stupid register allocation, copy the static chain
5296 pointer into a pseudo. If we have small register classes, copy
5297 the value from memory if static_chain_incoming_rtx is a REG. If
5298 we do stupid register allocation, we use the stack address
5299 generated above. */
5300 if (tem && ! obey_regdecls)
5302 #ifdef SMALL_REGISTER_CLASSES
5303 /* If the static chain originally came in a register, put it back
5304 there, then move it out in the next insn. The reason for
5305 this peculiar code is to satisfy function integration. */
5306 if (GET_CODE (static_chain_incoming_rtx) == REG)
5307 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5308 #endif
5310 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5313 while (tem)
5315 tree rtlexp = make_node (RTL_EXPR);
5317 RTL_EXPR_RTL (rtlexp) = last_ptr;
5318 context_display = tree_cons (tem, rtlexp, context_display);
5319 tem = decl_function_context (tem);
5320 if (tem == 0)
5321 break;
5322 /* Chain thru stack frames, assuming pointer to next lexical frame
5323 is found at the place we always store it. */
5324 #ifdef FRAME_GROWS_DOWNWARD
5325 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5326 #endif
5327 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
5328 memory_address (Pmode, last_ptr)));
5330 /* If we are not optimizing, ensure that we know that this
5331 piece of context is live over the entire function. */
5332 if (! optimize)
5333 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
5334 save_expr_regs);
5338 /* After the display initializations is where the tail-recursion label
5339 should go, if we end up needing one. Ensure we have a NOTE here
5340 since some things (like trampolines) get placed before this. */
5341 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5343 /* Evaluate now the sizes of any types declared among the arguments. */
5344 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5345 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
5347 /* Make sure there is a line number after the function entry setup code. */
5348 force_next_line_note ();
5349 }
5351 /* Generate RTL for the end of the current function.
5352 FILENAME and LINE are the current position in the source file.
5354 It is up to language-specific callers to do cleanups for parameters--
5355 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5357 void
5358 expand_function_end (filename, line, end_bindings)
5359 char *filename;
5360 int line;
5361 int end_bindings;
5362 {
5363 register int i;
5364 tree link;
5366 #ifdef TRAMPOLINE_TEMPLATE
5367 static rtx initial_trampoline;
5368 #endif
5370 if (output_bytecode)
5371 {
5372 bc_expand_function_end ();
5373 return;
5374 }
5376 #ifdef NON_SAVING_SETJMP
5377 /* Don't put any variables in registers if we call setjmp
5378 on a machine that fails to restore the registers. */
5379 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5380 {
5381 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5382 setjmp_protect (DECL_INITIAL (current_function_decl));
5384 setjmp_protect_args ();
5385 }
5386 #endif
5388 /* Save the argument pointer if a save area was made for it. */
5389 if (arg_pointer_save_area)
5390 {
5391 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5392 emit_insn_before (x, tail_recursion_reentry);
5393 }
5395 /* Initialize any trampolines required by this function. */
5396 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5397 {
5398 tree function = TREE_PURPOSE (link);
5399 rtx context = lookup_static_chain (function);
5400 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5401 rtx blktramp;
5402 rtx seq;
5404 #ifdef TRAMPOLINE_TEMPLATE
5405 /* First make sure this compilation has a template for
5406 initializing trampolines. */
5407 if (initial_trampoline == 0)
5408 {
5409 end_temporary_allocation ();
5410 initial_trampoline
5411 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
5412 resume_temporary_allocation ();
5413 }
5414 #endif
5416 /* Generate insns to initialize the trampoline. */
5417 start_sequence ();
5418 tramp = round_trampoline_addr (XEXP (tramp, 0));
5419 #ifdef TRAMPOLINE_TEMPLATE
5420 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5421 emit_block_move (blktramp, initial_trampoline,
5422 GEN_INT (TRAMPOLINE_SIZE),
5423 FUNCTION_BOUNDARY / BITS_PER_UNIT);
5424 #endif
5425 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5426 seq = get_insns ();
5427 end_sequence ();
5429 /* Put those insns at entry to the containing function (this one). */
5430 emit_insns_before (seq, tail_recursion_reentry);
5431 }
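/* Editorial illustration (disabled; not part of the original sources):
   conceptually, initializing a trampoline means copying a fixed code
   template into writable memory and patching the target function and
   static chain into known offsets.  The sizes and offsets below are
   invented for this sketch; the real values come from the target macros
   TRAMPOLINE_SIZE and INITIALIZE_TRAMPOLINE used above, and a real
   implementation must also flush the instruction cache. */
#if 0
#include <string.h>

#define TRAMP_SIZE 24            /* hypothetical template size */
#define FUNC_OFFSET 8            /* hypothetical slot for the function */
#define CHAIN_OFFSET 16          /* hypothetical slot for the chain */

static void
init_trampoline (tramp, tmpl, func, chain)
     char *tramp;
     char *tmpl;
     char *func;
     char *chain;
{
  memcpy (tramp, tmpl, TRAMP_SIZE);                    /* copy template */
  memcpy (tramp + FUNC_OFFSET, &func, sizeof func);    /* patch target */
  memcpy (tramp + CHAIN_OFFSET, &chain, sizeof chain); /* patch chain */
}
#endif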
5433 /* Warn about unused parms if extra warnings were specified. */
5434 if (warn_unused && extra_warnings)
5435 {
5436 tree decl;
5438 for (decl = DECL_ARGUMENTS (current_function_decl);
5439 decl; decl = TREE_CHAIN (decl))
5440 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5441 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5442 warning_with_decl (decl, "unused parameter `%s'");
5443 }
5445 /* Delete handlers for nonlocal gotos if nothing uses them. */
5446 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5447 delete_handlers ();
5449 /* End any sequences that failed to be closed due to syntax errors. */
5450 while (in_sequence_p ())
5451 end_sequence ();
5453 /* Outside function body, can't compute type's actual size
5454 until next function's body starts. */
5455 immediate_size_expand--;
5457 /* If doing stupid register allocation,
5458 mark register parms as dying here. */
5460 if (obey_regdecls)
5461 {
5462 rtx tem;
5463 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5464 use_variable (regno_reg_rtx[i]);
5466 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5468 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5469 {
5470 use_variable (XEXP (tem, 0));
5471 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5472 }
5474 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5475 use_variable (current_function_internal_arg_pointer);
5476 }
5478 clear_pending_stack_adjust ();
5479 do_pending_stack_adjust ();
5481 /* Mark the end of the function body.
5482 If control reaches this insn, the function can drop through
5483 without returning a value. */
5484 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5486 /* Output a line number for the end of the function.
5487 SDB depends on this. */
5488 emit_line_note_force (filename, line);
5490 /* Output the label for the actual return from the function,
5491 if one is expected. This happens either because a function epilogue
5492 is used instead of a return instruction, or because a return was done
5493 with a goto in order to run local cleanups, or because of pcc-style
5494 structure returning. */
5496 if (return_label)
5497 emit_label (return_label);
5499 /* C++ uses this. */
5500 if (end_bindings)
5501 expand_end_bindings (0, 0, 0);
5503 /* If we had calls to alloca, and this machine needs
5504 an accurate stack pointer to exit the function,
5505 insert some code to save and restore the stack pointer. */
5506 #ifdef EXIT_IGNORE_STACK
5507 if (! EXIT_IGNORE_STACK)
5508 #endif
5509 if (current_function_calls_alloca)
5510 {
5511 rtx tem = 0;
5513 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5514 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5515 }
5517 /* If scalar return value was computed in a pseudo-reg,
5518 copy that to the hard return register. */
5519 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5520 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5521 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5522 >= FIRST_PSEUDO_REGISTER))
5523 {
5524 rtx real_decl_result;
5526 #ifdef FUNCTION_OUTGOING_VALUE
5527 real_decl_result
5528 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5529 current_function_decl);
5530 #else
5531 real_decl_result
5532 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5533 current_function_decl);
5534 #endif
5535 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5536 emit_move_insn (real_decl_result,
5537 DECL_RTL (DECL_RESULT (current_function_decl)));
5538 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
5539 }
5541 /* If returning a structure, arrange to return the address of the value
5542 in a place where debuggers expect to find it.
5544 If returning a structure PCC style,
5545 the caller also depends on this value.
5546 And current_function_returns_pcc_struct is not necessarily set. */
5547 if (current_function_returns_struct
5548 || current_function_returns_pcc_struct)
5549 {
5550 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5551 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5552 #ifdef FUNCTION_OUTGOING_VALUE
5553 rtx outgoing
5554 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5555 current_function_decl);
5556 #else
5557 rtx outgoing
5558 = FUNCTION_VALUE (build_pointer_type (type),
5559 current_function_decl);
5560 #endif
5562 /* Mark this as a function return value so integrate will delete the
5563 assignment and USE below when inlining this function. */
5564 REG_FUNCTION_VALUE_P (outgoing) = 1;
5566 emit_move_insn (outgoing, value_address);
5567 use_variable (outgoing);
5568 }
5570 /* Output a return insn if we are using one.
5571 Otherwise, let the rtl chain end here, to drop through
5572 into the epilogue. */
5574 #ifdef HAVE_return
5575 if (HAVE_return)
5576 {
5577 emit_jump_insn (gen_return ());
5578 emit_barrier ();
5579 }
5580 #endif
5582 /* Fix up any gotos that jumped out to the outermost
5583 binding level of the function.
5584 Must follow emitting RETURN_LABEL. */
5586 /* If you have any cleanups to do at this point,
5587 and they need to create temporary variables,
5588 then you will lose. */
5589 expand_fixups (get_insns ());
5590 }
5592 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5594 static int *prologue;
5595 static int *epilogue;
5597 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5598 or a single insn). */
5600 static int *
5601 record_insns (insns)
5602 rtx insns;
5603 {
5604 int *vec;
5606 if (GET_CODE (insns) == SEQUENCE)
5607 {
5608 int len = XVECLEN (insns, 0);
5609 vec = (int *) oballoc ((len + 1) * sizeof (int));
5610 vec[len] = 0;
5611 while (--len >= 0)
5612 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5613 }
5614 else
5615 {
5616 vec = (int *) oballoc (2 * sizeof (int));
5617 vec[0] = INSN_UID (insns);
5618 vec[1] = 0;
5619 }
5620 return vec;
5621 }
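/* Editorial illustration (disabled; not part of the original sources):
   record_insns produces a vector of UIDs terminated by 0, relying on 0
   never being a valid INSN_UID.  A minimal sketch of the same
   convention, using malloc in place of the obstack allocator: */
#if 0
#include <stdlib.h>

static int *
copy_uid_vector (uids, len)
     int *uids;
     int len;
{
  int *vec = (int *) malloc ((len + 1) * sizeof (int));
  int i;
  for (i = 0; i < len; i++)
    vec[i] = uids[i];
  vec[len] = 0;                  /* 0 terminates the vector */
  return vec;
}
#endif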
5623 /* Determine how many INSN_UIDs in VEC are part of INSN. */
5625 static int
5626 contains (insn, vec)
5627 rtx insn;
5628 int *vec;
5629 {
5630 register int i, j;
5632 if (GET_CODE (insn) == INSN
5633 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5634 {
5635 int count = 0;
5636 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5637 for (j = 0; vec[j]; j++)
5638 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5639 count++;
5640 return count;
5641 }
5642 else
5643 {
5644 for (j = 0; vec[j]; j++)
5645 if (INSN_UID (insn) == vec[j])
5646 return 1;
5647 }
5648 return 0;
5649 }
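/* Editorial illustration (disabled; not part of the original sources):
   scanning such a vector stops at the 0 sentinel, exactly as the inner
   loops of `contains' do.  A hypothetical membership test: */
#if 0
static int
uid_in_vec (uid, vec)
     int uid;
     int *vec;
{
  int j;
  for (j = 0; vec[j]; j++)       /* stop at the 0 sentinel */
    if (uid == vec[j])
      return 1;
  return 0;
}
#endif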
5651 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5652 this into place with notes indicating where the prologue ends and where
5653 the epilogue begins. Update the basic block information when possible. */
5655 void
5656 thread_prologue_and_epilogue_insns (f)
5657 rtx f;
5658 {
5659 #ifdef HAVE_prologue
5660 if (HAVE_prologue)
5661 {
5662 rtx head, seq, insn;
5664 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5665 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5666 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5667 seq = gen_prologue ();
5668 head = emit_insn_after (seq, f);
5670 /* Include the new prologue insns in the first block. Ignore them
5671 if they form a basic block unto themselves. */
5672 if (basic_block_head && n_basic_blocks
5673 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5674 basic_block_head[0] = NEXT_INSN (f);
5676 /* Retain a map of the prologue insns. */
5677 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5678 }
5679 else
5680 #endif
5681 prologue = 0;
5683 #ifdef HAVE_epilogue
5684 if (HAVE_epilogue)
5685 {
5686 rtx insn = get_last_insn ();
5687 rtx prev = prev_nonnote_insn (insn);
5689 /* If we end with a BARRIER, we don't need an epilogue. */
5690 if (! (prev && GET_CODE (prev) == BARRIER))
5691 {
5692 rtx tail, seq, tem;
5693 rtx first_use = 0;
5694 rtx last_use = 0;
5696 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5697 epilogue insns, the USE insns at the end of a function,
5698 the jump insn that returns, and then a BARRIER. */
5700 /* Move the USE insns at the end of a function onto a list. */
5701 while (prev
5702 && GET_CODE (prev) == INSN
5703 && GET_CODE (PATTERN (prev)) == USE)
5704 {
5705 tem = prev;
5706 prev = prev_nonnote_insn (prev);
5708 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5709 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5710 if (first_use)
5711 {
5712 NEXT_INSN (tem) = first_use;
5713 PREV_INSN (first_use) = tem;
5714 }
5715 first_use = tem;
5716 if (!last_use)
5717 last_use = tem;
5718 }
5720 emit_barrier_after (insn);
5722 seq = gen_epilogue ();
5723 tail = emit_jump_insn_after (seq, insn);
5725 /* Insert the USE insns immediately before the return insn, which
5726 must be the first instruction before the final barrier. */
5727 if (first_use)
5728 {
5729 tem = prev_nonnote_insn (get_last_insn ());
5730 NEXT_INSN (PREV_INSN (tem)) = first_use;
5731 PREV_INSN (first_use) = PREV_INSN (tem);
5732 PREV_INSN (tem) = last_use;
5733 NEXT_INSN (last_use) = tem;
5734 }
5736 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5738 /* Include the new epilogue insns in the last block. Ignore
5739 them if they form a basic block unto themselves. */
5740 if (basic_block_end && n_basic_blocks
5741 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5742 basic_block_end[n_basic_blocks - 1] = tail;
5744 /* Retain a map of the epilogue insns. */
5745 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5746 return;
5747 }
5748 }
5749 #endif
5750 epilogue = 0;
5751 }
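/* Editorial illustration (disabled; not part of the original sources):
   the USE-insn moves above are ordinary doubly-linked-list splices on
   the NEXT_INSN/PREV_INSN chain.  A sketch with a stand-in node type: */
#if 0
struct node { struct node *prev, *next; };

static void
unlink_node (n)
     struct node *n;
{
  if (n->prev)
    n->prev->next = n->next;     /* bypass N in the forward direction */
  if (n->next)
    n->next->prev = n->prev;     /* and in the backward direction */
}

static void
link_after (n, pos)
     struct node *n;
     struct node *pos;
{
  n->prev = pos;
  n->next = pos->next;
  if (pos->next)
    pos->next->prev = n;
  pos->next = n;
}
#endif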
5753 /* Reposition the prologue-end and epilogue-begin notes after instruction
5754 scheduling and delayed branch scheduling. */
5756 void
5757 reposition_prologue_and_epilogue_notes (f)
5758 rtx f;
5759 {
5760 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5761 /* Reposition the prologue and epilogue notes. */
5762 if (n_basic_blocks)
5763 {
5764 rtx next, prev;
5765 int len;
5767 if (prologue)
5768 {
5769 register rtx insn, note = 0;
5771 /* Scan from the beginning until we reach the last prologue insn.
5772 We apparently can't depend on basic_block_{head,end} after
5773 reorg has run. */
5774 for (len = 0; prologue[len]; len++)
5775 ;
5776 for (insn = f; len && insn; insn = NEXT_INSN (insn))
5777 {
5778 if (GET_CODE (insn) == NOTE)
5779 {
5780 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5781 note = insn;
5782 }
5783 else if ((len -= contains (insn, prologue)) == 0)
5784 {
5785 /* Find the prologue-end note if we haven't already, and
5786 move it to just after the last prologue insn. */
5787 if (note == 0)
5788 {
5789 for (note = insn; note = NEXT_INSN (note);)
5790 if (GET_CODE (note) == NOTE
5791 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5792 break;
5793 }
5794 next = NEXT_INSN (note);
5795 prev = PREV_INSN (note);
5796 if (prev)
5797 NEXT_INSN (prev) = next;
5798 if (next)
5799 PREV_INSN (next) = prev;
5800 add_insn_after (note, insn);
5801 }
5802 }
5803 }
5805 if (epilogue)
5806 {
5807 register rtx insn, note = 0;
5809 /* Scan from the end until we reach the first epilogue insn.
5810 We apparently can't depend on basic_block_{head,end} after
5811 reorg has run. */
5812 for (len = 0; epilogue[len]; len++)
5813 ;
5814 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
5815 {
5816 if (GET_CODE (insn) == NOTE)
5817 {
5818 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5819 note = insn;
5820 }
5821 else if ((len -= contains (insn, epilogue)) == 0)
5822 {
5823 /* Find the epilogue-begin note if we haven't already, and
5824 move it to just before the first epilogue insn. */
5825 if (note == 0)
5826 {
5827 for (note = insn; note = PREV_INSN (note);)
5828 if (GET_CODE (note) == NOTE
5829 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5830 break;
5831 }
5832 next = NEXT_INSN (note);
5833 prev = PREV_INSN (note);
5834 if (prev)
5835 NEXT_INSN (prev) = next;
5836 if (next)
5837 PREV_INSN (next) = prev;
5838 add_insn_after (note, PREV_INSN (insn));
5839 }
5840 }
5841 }
5842 }
5843 #endif /* HAVE_prologue or HAVE_epilogue */
5844 }