/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
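
/* Example (illustrative): with ALIGN == 8,
     FLOOR_ROUND (19, 8) == 16,   FLOOR_ROUND (-19, 8) == -24,
     CEIL_ROUND (19, 8) == 24,    CEIL_ROUND (-19, 8) == -16.
   The masking always rounds FLOOR_ROUND toward minus infinity and
   CEIL_ROUND toward plus infinity, whereas C division with a negative
   dividend may round either way.  */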

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis.  */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;
196 /* Name of function now being compiled. */
198 char *current_function_name;
200 /* If non-zero, an RTL expression for the location at which the current
201 function returns its result. If the current function returns its
202 result in a register, current_function_return_rtx will always be
203 the hard register containing the result. */
205 rtx current_function_return_rtx;
207 /* Nonzero if the current function uses the constant pool. */
209 int current_function_uses_const_pool;
211 /* Nonzero if the current function uses pic_offset_table_rtx. */
212 int current_function_uses_pic_offset_table;
214 /* The arg pointer hard register, or the pseudo into which it was copied. */
215 rtx current_function_internal_arg_pointer;
217 /* The FUNCTION_DECL for an inline function currently being expanded. */
218 tree inline_function_decl;
220 /* Number of function calls seen so far in current function. */
222 int function_call_count;
224 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
225 (labels to which there can be nonlocal gotos from nested functions)
226 in this function. */
228 tree nonlocal_labels;
230 /* RTX for stack slot that holds the current handler for nonlocal gotos.
231 Zero when function does not have nonlocal labels. */
233 rtx nonlocal_goto_handler_slot;
235 /* RTX for stack slot that holds the stack pointer value to restore
236 for a nonlocal goto.
237 Zero when function does not have nonlocal labels. */
239 rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is
   requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
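
/* Example (illustrative, hypothetical values): while expanding a
   TARGET_EXPR inside a nested block, the levels might be

     temp_slot_level == 4            (current statement)
     target_temp_slot_level == 3     (innermost cleanup point)
     var_temp_slot_level == 2        (innermost block)

   assign_stack_temp files a new slot at one of these levels according
   to its KEEP argument; see that function below.  */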

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int));

/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
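
/* Illustrative call (a sketch, not code from this file): to get a
   word-sized slot with the alignment implied by its mode, expansion
   code can do

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   SLOT is then a (mem:SI ...) whose address is an offset from
   virtual_stack_vars_rtx until virtual registers are instantiated.  */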

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
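
/* Illustrative call (a sketch; TYPE is a hypothetical tree node): a
   BLKmode temporary that must live until the enclosing
   CLEANUP_POINT_EXPR rather than dying with the current statement:

     rtx t = assign_stack_temp (BLKmode, int_size_in_bytes (type), 2);

   KEEP == 2 files the slot at target_temp_slot_level, so the
   free_temp_slots call at the end of the statement does not free it.  */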

/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
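
/* Illustrative call (a sketch, not code from this file):

     rtx val = assign_temp (TREE_TYPE (exp), 1, 0, 1);

   For a scalar type this returns a fresh pseudo REG, unpromoted since
   DONT_PROMOTE is 1; for a BLKmode type it falls back to
   assign_stack_temp and sets MEM_IN_STRUCT_P from AGGREGATE_TYPE_P.  */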

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
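
/* Example (illustrative): two free BLKmode slots with base_offset 32,
   full_size 16 and base_offset 48, full_size 16 satisfy
   p->base_offset + p->full_size == q->base_offset, so they merge into
   a single 32-byte slot at offset 32 that a later, larger request can
   reuse.  */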

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
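
/* Example (illustrative; ADDR_REG and SLOT are hypothetical): after

     emit_move_insn (addr_reg, XEXP (slot, 0));
     update_temp_slot_address (XEXP (slot, 0), addr_reg);

   find_temp_slot_from_address will recognize ADDR_REG as referring to
   the slot; once several aliases exist, the slot's `address' field
   becomes an EXPR_LIST of them.  */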

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X matched
   one of our slots, just mark that one.  Otherwise, we can't easily
   predict which it is, so upgrade all of them.  Kept slots need not
   be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
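
/* Illustrative usage pattern (a sketch of how callers elsewhere in the
   compiler typically pair these routines):

     push_temp_slots ();
     ... expand an expression, possibly calling assign_stack_temp ...
     preserve_temp_slots (result);     (if RESULT may sit in a temp)
     pop_temp_slots ();

   Temporaries made at the inner level die at the pop unless
   preserve_temp_slots demoted them to the outer level.  */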

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl)
                            || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (flag_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
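
/* Illustrative effect (a sketch): if DECL's rtl was (reg:SI 42), the
   same rtx object is transmuted in place into something like

     (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int -4)))

   so every insn that shares the rtx sees the new form at once; the
   insns themselves are then repaired by fixup_var_refs.  When
   can_use_addressof holds, the pseudo is instead wrapped in an
   ADDRESSOF by gen_mem_addressof and kept in a register for now.  */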

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;

      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}

static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}

/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
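
/* Example (illustrative): if an insn mentions VAR twice because of a
   MATCH_DUP, both occurrences look up the same entry here, so both are
   rewritten to the single pseudo recorded in `new' and the operands
   still match.  */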

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.
             Similarly if this is storing VAR from a register from which
             it was loaded in the previous insn.  This will occur
             when an ADDRESSOF was made for an arglist slot.  */
          else if (toplevel
                   && (set = single_set (insn)) != 0
                   && SET_DEST (set) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && (rtx_equal_p (SET_SRC (set), var)
                       || (GET_CODE (SET_SRC (set)) == REG
                           && (prev = prev_nonnote_insn (insn)) != 0
                           && (prev_set = single_set (prev)) != 0
                           && SET_DEST (prev_set) == SET_SRC (set)
                           && rtx_equal_p (SET_SRC (prev_set), var))))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

              if (SMALL_REGISTER_CLASSES)
                {
                  /* If the insn that copies the results of a CALL_INSN
                     into a pseudo now references VAR, we have to use an
                     intermediate pseudo since we want the life of the
                     return value register to be only a single insn.

                     If we don't use an intermediate pseudo, such things as
                     the address computations needed to make the address of
                     VAR valid could be placed between the CALL_INSN and INSN.

                     To make sure this doesn't happen, we record the
                     destination of the CALL_INSN and see if the next insn
                     uses both that and VAR.  */

                  if (call_dest != 0 && GET_CODE (insn) == INSN
                      && reg_mentioned_p (var, PATTERN (insn))
                      && reg_mentioned_p (call_dest, PATTERN (insn)))
                    {
                      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                      emit_insn_before (gen_move_insn (temp, call_dest), insn);

                      PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                    call_dest, temp);
                    }

                  if (GET_CODE (insn) == CALL_INSN
                      && GET_CODE (PATTERN (insn)) == SET)
                    call_dest = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (insn) == CALL_INSN
                           && GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    call_dest = 0;
                }

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
1816 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1817 See if the rtx expression at *LOC in INSN needs to be changed.
1819 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1820 contain a list of original rtx's and replacements. If we find that we need
1821 to modify this insn by replacing a memory reference with a pseudo or by
1822 making a new MEM to implement a SUBREG, we consult that list to see if
1823 we have already chosen a replacement. If none has already been allocated,
1824 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1825 or the SUBREG, as appropriate, to the pseudo. */
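/* A sketch of the mechanism (modes and register numbers invented):
   suppose VAR is now the slot (mem:SI (plus (reg fp) (const_int -8)))
   and INSN is

	(set (reg:SI 100) (plus:SI VAR VAR))

   on a machine whose add pattern rejects memory operands.  Both uses
   must then be replaced by one and the same fresh pseudo, say
   (reg:SI 101), in case they came from a MATCH_DUP, and VAR is copied
   into that pseudo just before INSN.  */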
1827 static void
1828 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1829 register rtx var;
1830 enum machine_mode promoted_mode;
1831 register rtx *loc;
1832 rtx insn;
1833 struct fixup_replacement **replacements;
1835 register int i;
1836 register rtx x = *loc;
1837 RTX_CODE code = GET_CODE (x);
1838 register char *fmt;
1839 register rtx tem, tem1;
1840 struct fixup_replacement *replacement;
1842 switch (code)
1844 case ADDRESSOF:
1845 if (XEXP (x, 0) == var)
1847 /* Prevent sharing of rtl that might lose. */
1848 rtx sub = copy_rtx (XEXP (var, 0));
1850 start_sequence ();
1852 if (! validate_change (insn, loc, sub, 0))
1854 rtx y = force_operand (sub, NULL_RTX);
1856 if (! validate_change (insn, loc, y, 0))
1857 *loc = copy_to_reg (y);
1860 emit_insn_before (gen_sequence (), insn);
1861 end_sequence ();
1863 return;
1865 case MEM:
1866 if (var == x)
1868 /* If we already have a replacement, use it. Otherwise,
1869 try to fix up this address in case it is invalid. */
1871 replacement = find_fixup_replacement (replacements, var);
1872 if (replacement->new)
1874 *loc = replacement->new;
1875 return;
1878 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1880 /* Unless we are forcing memory to register or we changed the mode,
1881 we can leave things the way they are if the insn is valid. */
1883 INSN_CODE (insn) = -1;
1884 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1885 && recog_memoized (insn) >= 0)
1886 return;
1888 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1889 return;
1892 /* If X contains VAR, we need to unshare it here so that we update
1893 each occurrence separately. But all identical MEMs in one insn
1894 must be replaced with the same rtx because of the possibility of
1895 MATCH_DUPs. */
1897 if (reg_mentioned_p (var, x))
1899 replacement = find_fixup_replacement (replacements, x);
1900 if (replacement->new == 0)
1901 replacement->new = copy_most_rtx (x, var);
1903 *loc = x = replacement->new;
1905 break;
1907 case REG:
1908 case CC0:
1909 case PC:
1910 case CONST_INT:
1911 case CONST:
1912 case SYMBOL_REF:
1913 case LABEL_REF:
1914 case CONST_DOUBLE:
1915 return;
1917 case SIGN_EXTRACT:
1918 case ZERO_EXTRACT:
1919 /* Note that in some cases those types of expressions are altered
1920 by optimize_bit_field, and do not survive to get here. */
1921 if (XEXP (x, 0) == var
1922 || (GET_CODE (XEXP (x, 0)) == SUBREG
1923 && SUBREG_REG (XEXP (x, 0)) == var))
1925 /* Get TEM as a valid MEM in the mode presently in the insn.
1927 We don't worry about the possibility of MATCH_DUP here; it
1928 is highly unlikely and would be tricky to handle. */
1930 tem = XEXP (x, 0);
1931 if (GET_CODE (tem) == SUBREG)
1933 if (GET_MODE_BITSIZE (GET_MODE (tem))
1934 > GET_MODE_BITSIZE (GET_MODE (var)))
1936 replacement = find_fixup_replacement (replacements, var);
1937 if (replacement->new == 0)
1938 replacement->new = gen_reg_rtx (GET_MODE (var));
1939 SUBREG_REG (tem) = replacement->new;
1941 else
1942 tem = fixup_memory_subreg (tem, insn, 0);
1944 else
1945 tem = fixup_stack_1 (tem, insn);
1947 /* Unless we want to load from memory, get TEM into the proper mode
1948 for an extract from memory. This can only be done if the
1949 extract is at a constant position and has a constant length. */
1951 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1952 && GET_CODE (XEXP (x, 2)) == CONST_INT
1953 && ! mode_dependent_address_p (XEXP (tem, 0))
1954 && ! MEM_VOLATILE_P (tem))
1956 enum machine_mode wanted_mode = VOIDmode;
1957 enum machine_mode is_mode = GET_MODE (tem);
1958 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1960 #ifdef HAVE_extzv
1961 if (GET_CODE (x) == ZERO_EXTRACT)
1962 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1963 #endif
1964 #ifdef HAVE_extv
1965 if (GET_CODE (x) == SIGN_EXTRACT)
1966 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1967 #endif
1968 /* If we have a narrower mode, we can do something. */
1969 if (wanted_mode != VOIDmode
1970 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1972 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1973 rtx old_pos = XEXP (x, 2);
1974 rtx newmem;
1976 /* If the bytes and bits are counted differently, we
1977 must adjust the offset. */
1978 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1979 offset = (GET_MODE_SIZE (is_mode)
1980 - GET_MODE_SIZE (wanted_mode) - offset);
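/* Worked example with assumed parameters (BITS_PER_UNIT 8, is_mode
   SImode of 4 bytes, wanted_mode QImode of 1 byte, pos 8): offset is
   8 / 8 = 1; if bytes and bits are numbered from opposite ends the
   wanted byte really lives at 4 - 1 - 1 = 2; pos then reduces to 0.  */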
1982 pos %= GET_MODE_BITSIZE (wanted_mode);
1984 newmem = gen_rtx_MEM (wanted_mode,
1985 plus_constant (XEXP (tem, 0), offset));
1986 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1987 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1988 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1990 /* Make the change and see if the insn remains valid. */
1991 INSN_CODE (insn) = -1;
1992 XEXP (x, 0) = newmem;
1993 XEXP (x, 2) = GEN_INT (pos);
1995 if (recog_memoized (insn) >= 0)
1996 return;
1998 /* Otherwise, restore old position. XEXP (x, 0) will be
1999 restored later. */
2000 XEXP (x, 2) = old_pos;
2004 /* If we get here, the bitfield extract insn can't accept a memory
2005 reference. Copy the input into a register. */
2007 tem1 = gen_reg_rtx (GET_MODE (tem));
2008 emit_insn_before (gen_move_insn (tem1, tem), insn);
2009 XEXP (x, 0) = tem1;
2010 return;
2012 break;
2014 case SUBREG:
2015 if (SUBREG_REG (x) == var)
2017 /* If this is a special SUBREG made because VAR was promoted
2018 from a wider mode, replace it with VAR and call ourself
2019 recursively, this time saying that the object previously
2020 had its current mode (by virtue of the SUBREG). */
2022 if (SUBREG_PROMOTED_VAR_P (x))
2024 *loc = var;
2025 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2026 return;
2029 /* If this SUBREG makes VAR wider, it has become a paradoxical
2030 SUBREG with VAR in memory, but these aren't allowed at this
2031 stage of the compilation. So load VAR into a pseudo and take
2032 a SUBREG of that pseudo. */
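/* Illustration (pseudo number invented): with VAR = (mem:HI slot), the
   paradoxical (subreg:SI VAR 0) becomes (subreg:SI (reg:HI 110) 0),
   and the caller later copies VAR into register 110.  */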
2033 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2035 replacement = find_fixup_replacement (replacements, var);
2036 if (replacement->new == 0)
2037 replacement->new = gen_reg_rtx (GET_MODE (var));
2038 SUBREG_REG (x) = replacement->new;
2039 return;
2042 /* See if we have already found a replacement for this SUBREG.
2043 If so, use it. Otherwise, make a MEM and see if the insn
2044 is recognized. If not, or if we should force MEM into a register,
2045 make a pseudo for this SUBREG. */
2046 replacement = find_fixup_replacement (replacements, x);
2047 if (replacement->new)
2049 *loc = replacement->new;
2050 return;
2053 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2055 INSN_CODE (insn) = -1;
2056 if (! flag_force_mem && recog_memoized (insn) >= 0)
2057 return;
2059 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2060 return;
2062 break;
2064 case SET:
2065 /* First do special simplification of bit-field references. */
2066 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2067 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2068 optimize_bit_field (x, insn, 0);
2069 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2070 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2071 optimize_bit_field (x, insn, NULL_PTR);
2073 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2074 into a register and then store it back out. */
2075 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2076 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2077 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2078 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2079 > GET_MODE_SIZE (GET_MODE (var))))
2081 replacement = find_fixup_replacement (replacements, var);
2082 if (replacement->new == 0)
2083 replacement->new = gen_reg_rtx (GET_MODE (var));
2085 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2086 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2089 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2090 insn into a pseudo and store the low part of the pseudo into VAR. */
2091 if (GET_CODE (SET_DEST (x)) == SUBREG
2092 && SUBREG_REG (SET_DEST (x)) == var
2093 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2094 > GET_MODE_SIZE (GET_MODE (var))))
2096 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2097 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2098 tem)),
2099 insn);
2100 break;
2104 rtx dest = SET_DEST (x);
2105 rtx src = SET_SRC (x);
2106 #ifdef HAVE_insv
2107 rtx outerdest = dest;
2108 #endif
2110 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2111 || GET_CODE (dest) == SIGN_EXTRACT
2112 || GET_CODE (dest) == ZERO_EXTRACT)
2113 dest = XEXP (dest, 0);
2115 if (GET_CODE (src) == SUBREG)
2116 src = XEXP (src, 0);
2118 /* If VAR does not appear at the top level of the SET
2119 just scan the lower levels of the tree. */
2121 if (src != var && dest != var)
2122 break;
2124 /* We will need to rerecognize this insn. */
2125 INSN_CODE (insn) = -1;
2127 #ifdef HAVE_insv
2128 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2130 /* Since this case will return, ensure we fixup all the
2131 operands here. */
2132 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2133 insn, replacements);
2134 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2135 insn, replacements);
2136 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2137 insn, replacements);
2139 tem = XEXP (outerdest, 0);
2141 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2142 that may appear inside a ZERO_EXTRACT.
2143 This was legitimate when the MEM was a REG. */
2144 if (GET_CODE (tem) == SUBREG
2145 && SUBREG_REG (tem) == var)
2146 tem = fixup_memory_subreg (tem, insn, 0);
2147 else
2148 tem = fixup_stack_1 (tem, insn);
2150 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2151 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2152 && ! mode_dependent_address_p (XEXP (tem, 0))
2153 && ! MEM_VOLATILE_P (tem))
2155 enum machine_mode wanted_mode
2156 = insn_operand_mode[(int) CODE_FOR_insv][0];
2157 enum machine_mode is_mode = GET_MODE (tem);
2158 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2160 /* If we have a narrower mode, we can do something. */
2161 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2163 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2164 rtx old_pos = XEXP (outerdest, 2);
2165 rtx newmem;
2167 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2168 offset = (GET_MODE_SIZE (is_mode)
2169 - GET_MODE_SIZE (wanted_mode) - offset);
2171 pos %= GET_MODE_BITSIZE (wanted_mode);
2173 newmem = gen_rtx_MEM (wanted_mode,
2174 plus_constant (XEXP (tem, 0), offset));
2175 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2176 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2177 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2179 /* Make the change and see if the insn remains valid. */
2180 INSN_CODE (insn) = -1;
2181 XEXP (outerdest, 0) = newmem;
2182 XEXP (outerdest, 2) = GEN_INT (pos);
2184 if (recog_memoized (insn) >= 0)
2185 return;
2187 /* Otherwise, restore old position. XEXP (x, 0) will be
2188 restored later. */
2189 XEXP (outerdest, 2) = old_pos;
2193 /* If we get here, the bit-field store doesn't allow memory
2194 or isn't located at a constant position. Load the value into
2195 a register, do the store, and put it back into memory. */
2197 tem1 = gen_reg_rtx (GET_MODE (tem));
2198 emit_insn_before (gen_move_insn (tem1, tem), insn);
2199 emit_insn_after (gen_move_insn (tem, tem1), insn);
2200 XEXP (outerdest, 0) = tem1;
2201 return;
2203 #endif
2205 /* STRICT_LOW_PART is a no-op on memory references
2206 and it can cause combinations to be unrecognizable,
2207 so eliminate it. */
2209 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2210 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2212 /* A valid insn to copy VAR into or out of a register
2213 must be left alone, to avoid an infinite loop here.
2214 If the reference to VAR is by a subreg, fix that up,
2215 since SUBREG is not valid for a memref.
2216 Also fix up the address of the stack slot.
2218 Note that we must not try to recognize the insn until
2219 after we know that we have valid addresses and no
2220 (subreg (mem ...) ...) constructs, since these interfere
2221 with determining the validity of the insn. */
2223 if ((SET_SRC (x) == var
2224 || (GET_CODE (SET_SRC (x)) == SUBREG
2225 && SUBREG_REG (SET_SRC (x)) == var))
2226 && (GET_CODE (SET_DEST (x)) == REG
2227 || (GET_CODE (SET_DEST (x)) == SUBREG
2228 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2229 && GET_MODE (var) == promoted_mode
2230 && x == single_set (insn))
2232 rtx pat;
2234 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2235 if (replacement->new)
2236 SET_SRC (x) = replacement->new;
2237 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2238 SET_SRC (x) = replacement->new
2239 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2240 else
2241 SET_SRC (x) = replacement->new
2242 = fixup_stack_1 (SET_SRC (x), insn);
2244 if (recog_memoized (insn) >= 0)
2245 return;
2247 /* INSN is not valid, but we know that we want to
2248 copy SET_SRC (x) to SET_DEST (x) in some way. So
2249 we generate the move and see whether it requires more
2250 than one insn. If it does, we emit those insns and
2251 delete INSN. Otherwise, we can just replace the pattern
2252 of INSN; we have already verified above that INSN has
2253 no function other than to do X. */
2255 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2256 if (GET_CODE (pat) == SEQUENCE)
2258 emit_insn_after (pat, insn);
2259 PUT_CODE (insn, NOTE);
2260 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2261 NOTE_SOURCE_FILE (insn) = 0;
2263 else
2264 PATTERN (insn) = pat;
2266 return;
2269 if ((SET_DEST (x) == var
2270 || (GET_CODE (SET_DEST (x)) == SUBREG
2271 && SUBREG_REG (SET_DEST (x)) == var))
2272 && (GET_CODE (SET_SRC (x)) == REG
2273 || (GET_CODE (SET_SRC (x)) == SUBREG
2274 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2275 && GET_MODE (var) == promoted_mode
2276 && x == single_set (insn))
2278 rtx pat;
2280 if (GET_CODE (SET_DEST (x)) == SUBREG)
2281 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2282 else
2283 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2285 if (recog_memoized (insn) >= 0)
2286 return;
2288 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2289 if (GET_CODE (pat) == SEQUENCE)
2291 emit_insn_after (pat, insn);
2292 PUT_CODE (insn, NOTE);
2293 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2294 NOTE_SOURCE_FILE (insn) = 0;
2296 else
2297 PATTERN (insn) = pat;
2299 return;
2302 /* Otherwise, storing into VAR must be handled specially
2303 by storing into a temporary and copying that into VAR
2304 with a new insn after this one. Note that this case
2305 will be used when storing into a promoted scalar since
2306 the insn will now have different modes on the input
2307 and output and hence will be invalid (except for the case
2308 of setting it to a constant, which does not need any
2309 change if it is valid). We generate extra code in that case,
2310 but combine.c will eliminate it. */
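/* Hypothetical instance (pseudo numbers invented): if VAR was promoted
   from HImode to SImode, the store (set VAR (reg:SI 105)) is invalid
   once VAR is (mem:HI slot).  On a little-endian machine it becomes
   (set (reg:SI 109) (reg:SI 105)) followed by
   (set (mem:HI slot) (subreg:HI (reg:SI 109) 0)).  */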
2312 if (dest == var)
2314 rtx temp;
2315 rtx fixeddest = SET_DEST (x);
2317 /* A STRICT_LOW_PART around a MEM can be discarded. */
2318 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2319 fixeddest = XEXP (fixeddest, 0);
2320 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2321 if (GET_CODE (fixeddest) == SUBREG)
2323 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2324 promoted_mode = GET_MODE (fixeddest);
2326 else
2327 fixeddest = fixup_stack_1 (fixeddest, insn);
2329 temp = gen_reg_rtx (promoted_mode);
2331 emit_insn_after (gen_move_insn (fixeddest,
2332 gen_lowpart (GET_MODE (fixeddest),
2333 temp)),
2334 insn);
2336 SET_DEST (x) = temp;
2340 default:
2341 break;
2344 /* Nothing special about this RTX; fix its operands. */
2346 fmt = GET_RTX_FORMAT (code);
2347 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2349 if (fmt[i] == 'e')
2350 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2351 if (fmt[i] == 'E')
2353 register int j;
2354 for (j = 0; j < XVECLEN (x, i); j++)
2355 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2356 insn, replacements);
2361 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2362 return an rtx (MEM:m1 newaddr) which is equivalent.
2363 If any insns must be emitted to compute NEWADDR, put them before INSN.
2365 UNCRITICAL nonzero means accept paradoxical subregs.
2366 This is used for subregs found inside REG_NOTES. */
2368 static rtx
2369 fixup_memory_subreg (x, insn, uncritical)
2370 rtx x;
2371 rtx insn;
2372 int uncritical;
2374 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2375 rtx addr = XEXP (SUBREG_REG (x), 0);
2376 enum machine_mode mode = GET_MODE (x);
2377 rtx result;
2379 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2380 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2381 && ! uncritical)
2382 abort ();
2384 if (BYTES_BIG_ENDIAN)
2385 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2386 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2387 addr = plus_constant (addr, offset);
2388 if (!flag_force_addr && memory_address_p (mode, addr))
2389 /* Shortcut if no insns need be emitted. */
2390 return change_address (SUBREG_REG (x), mode, addr);
2391 start_sequence ();
2392 result = change_address (SUBREG_REG (x), mode, addr);
2393 emit_insn_before (gen_sequence (), insn);
2394 end_sequence ();
2395 return result;
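/* Example of the transformation above (assuming a little-endian target
   with 4-byte words): X = (subreg:SI (mem:DI addr) 1) has SUBREG_WORD 1,
   so the result is (mem:SI (plus addr (const_int 4))), addressing the
   high word of the DImode slot directly.  */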
2398 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2399 Replace subexpressions of X in place.
2400 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2401 Otherwise return X, with its contents possibly altered.
2403 If any insns must be emitted to compute NEWADDR, put them before INSN.
2405 UNCRITICAL is as in fixup_memory_subreg. */
2407 static rtx
2408 walk_fixup_memory_subreg (x, insn, uncritical)
2409 register rtx x;
2410 rtx insn;
2411 int uncritical;
2413 register enum rtx_code code;
2414 register char *fmt;
2415 register int i;
2417 if (x == 0)
2418 return 0;
2420 code = GET_CODE (x);
2422 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2423 return fixup_memory_subreg (x, insn, uncritical);
2425 /* Nothing special about this RTX; fix its operands. */
2427 fmt = GET_RTX_FORMAT (code);
2428 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2430 if (fmt[i] == 'e')
2431 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2432 if (fmt[i] == 'E')
2434 register int j;
2435 for (j = 0; j < XVECLEN (x, i); j++)
2436 XVECEXP (x, i, j)
2437 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2440 return x;
2443 /* For each memory ref within X, if it refers to a stack slot
2444 with an out of range displacement, put the address in a temp register
2445 (emitting new insns before INSN to load these registers)
2446 and alter the memory ref to use that register.
2447 Replace each such MEM rtx with a copy, to avoid clobberage. */
2449 static rtx
2450 fixup_stack_1 (x, insn)
2451 rtx x;
2452 rtx insn;
2454 register int i;
2455 register RTX_CODE code = GET_CODE (x);
2456 register char *fmt;
2458 if (code == MEM)
2460 register rtx ad = XEXP (x, 0);
2461 /* If we have address of a stack slot but it's not valid
2462 (displacement is too large), compute the sum in a register. */
2463 if (GET_CODE (ad) == PLUS
2464 && GET_CODE (XEXP (ad, 0)) == REG
2465 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2466 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2467 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2468 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2469 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2470 #endif
2471 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2472 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2473 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2474 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2476 rtx temp, seq;
2477 if (memory_address_p (GET_MODE (x), ad))
2478 return x;
2480 start_sequence ();
2481 temp = copy_to_reg (ad);
2482 seq = gen_sequence ();
2483 end_sequence ();
2484 emit_insn_before (seq, insn);
2485 return change_address (x, VOIDmode, temp);
2487 return x;
2490 fmt = GET_RTX_FORMAT (code);
2491 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2493 if (fmt[i] == 'e')
2494 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2495 if (fmt[i] == 'E')
2497 register int j;
2498 for (j = 0; j < XVECLEN (x, i); j++)
2499 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2502 return x;
2505 /* Optimization: a bit-field instruction whose field
2506 happens to be a byte or halfword in memory
2507 can be changed to a move instruction.
2509 We call here when INSN is an insn to examine or store into a bit-field.
2510 BODY is the SET-rtx to be altered.
2512 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2513 (Currently this is called only from function.c, and EQUIV_MEM
2514 is always 0.) */
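/* A sketch of the optimization, assuming bits and bytes are both
   numbered little-endian and the field is the second byte of a word:

	(set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
	     (reg:SI 100))

   names an aligned byte and can become the plain move

	(set (mem:QI (plus addr (const_int 1)))
	     (subreg:QI (reg:SI 100) 0)).  */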
2516 static void
2517 optimize_bit_field (body, insn, equiv_mem)
2518 rtx body;
2519 rtx insn;
2520 rtx *equiv_mem;
2522 register rtx bitfield;
2523 int destflag;
2524 rtx seq = 0;
2525 enum machine_mode mode;
2527 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2528 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2529 bitfield = SET_DEST (body), destflag = 1;
2530 else
2531 bitfield = SET_SRC (body), destflag = 0;
2533 /* First check that the field being stored has constant size and position
2534 and is in fact a byte or halfword suitably aligned. */
2536 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2537 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2538 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2539 != BLKmode)
2540 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2542 register rtx memref = 0;
2544 /* Now check that the containing word is memory, not a register,
2545 and that it is safe to change the machine mode. */
2547 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2548 memref = XEXP (bitfield, 0);
2549 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2550 && equiv_mem != 0)
2551 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2552 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2553 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2554 memref = SUBREG_REG (XEXP (bitfield, 0));
2555 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2556 && equiv_mem != 0
2557 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2558 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2560 if (memref
2561 && ! mode_dependent_address_p (XEXP (memref, 0))
2562 && ! MEM_VOLATILE_P (memref))
2564 /* Now adjust the address, first for any subreg'ing
2565 that we are now getting rid of,
2566 and then for which byte of the word is wanted. */
2568 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2569 rtx insns;
2571 /* Adjust OFFSET to count bits from low-address byte. */
2572 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2573 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2574 - offset - INTVAL (XEXP (bitfield, 1)));
2576 /* Adjust OFFSET to count bytes from low-address byte. */
2577 offset /= BITS_PER_UNIT;
2578 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2580 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2581 if (BYTES_BIG_ENDIAN)
2582 offset -= (MIN (UNITS_PER_WORD,
2583 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2584 - MIN (UNITS_PER_WORD,
2585 GET_MODE_SIZE (GET_MODE (memref))));
2588 start_sequence ();
2589 memref = change_address (memref, mode,
2590 plus_constant (XEXP (memref, 0), offset));
2591 insns = get_insns ();
2592 end_sequence ();
2593 emit_insns_before (insns, insn);
2595 /* Store this memory reference where
2596 we found the bit field reference. */
2598 if (destflag)
2600 validate_change (insn, &SET_DEST (body), memref, 1);
2601 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2603 rtx src = SET_SRC (body);
2604 while (GET_CODE (src) == SUBREG
2605 && SUBREG_WORD (src) == 0)
2606 src = SUBREG_REG (src);
2607 if (GET_MODE (src) != GET_MODE (memref))
2608 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2609 validate_change (insn, &SET_SRC (body), src, 1);
2611 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2612 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2613 /* This shouldn't happen because anything that didn't have
2614 one of these modes should have got converted explicitly
2615 and then referenced through a subreg.
2616 This is so because the original bit-field was
2617 handled by agg_mode and so its tree structure had
2618 the same mode that memref now has. */
2619 abort ();
2621 else
2623 rtx dest = SET_DEST (body);
2625 while (GET_CODE (dest) == SUBREG
2626 && SUBREG_WORD (dest) == 0
2627 && (GET_MODE_CLASS (GET_MODE (dest))
2628 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2629 dest = SUBREG_REG (dest);
2631 validate_change (insn, &SET_DEST (body), dest, 1);
2633 if (GET_MODE (dest) == GET_MODE (memref))
2634 validate_change (insn, &SET_SRC (body), memref, 1);
2635 else
2637 /* Convert the mem ref to the destination mode. */
2638 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2640 start_sequence ();
2641 convert_move (newreg, memref,
2642 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2643 seq = get_insns ();
2644 end_sequence ();
2646 validate_change (insn, &SET_SRC (body), newreg, 1);
2650 /* See if we can convert this extraction or insertion into
2651 a simple move insn. We might not be able to do so if this
2652 was, for example, part of a PARALLEL.
2654 If we succeed, write out any needed conversions. If we fail,
2655 it is hard to guess why we failed, so don't do anything
2656 special; just let the optimization be suppressed. */
2658 if (apply_change_group () && seq)
2659 emit_insns_before (seq, insn);
2664 /* These routines are responsible for converting virtual register references
2665 to the actual hard register references once RTL generation is complete.
2667 The following four variables are used for communication between the
2668 routines. They contain the offsets of the virtual registers from their
2669 respective hard registers. */
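/* For example, once var_offset is computed, an address such as
   (plus:SI (reg virtual-stack-vars) (const_int 8)) is rewritten by the
   code below into (plus:SI (reg frame-pointer) (const_int N)), where
   N is 8 plus var_offset, folding the two constants together.  */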
2671 static int in_arg_offset;
2672 static int var_offset;
2673 static int dynamic_offset;
2674 static int out_arg_offset;
2676 /* In most machines, the stack pointer register is equivalent to the bottom
2677 of the stack. */
2679 #ifndef STACK_POINTER_OFFSET
2680 #define STACK_POINTER_OFFSET 0
2681 #endif
2683 /* If not defined, pick an appropriate default for the offset of dynamically
2684 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2685 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2687 #ifndef STACK_DYNAMIC_OFFSET
2689 #ifdef ACCUMULATE_OUTGOING_ARGS
2690 /* The bottom of the stack points to the actual arguments. If
2691 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2692 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2693 stack space for register parameters is not pushed by the caller, but
2694 rather part of the fixed stack areas and hence not included in
2695 `current_function_outgoing_args_size'. Nevertheless, we must allow
2696 for it when allocating stack dynamic objects. */
2698 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2699 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2700 (current_function_outgoing_args_size \
2701 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2703 #else
2704 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2705 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2706 #endif
2708 #else
2709 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2710 #endif
2711 #endif
2713 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2714 its address taken. DECL is the decl for the object stored in the
2715 register, for later use if we do need to force REG into the stack.
2716 REG is overwritten by the MEM like in put_reg_into_stack. */
2718 rtx
2719 gen_mem_addressof (reg, decl)
2720 rtx reg;
2721 tree decl;
2723 tree type = TREE_TYPE (decl);
2725 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2726 SET_ADDRESSOF_DECL (r, decl);
2728 XEXP (reg, 0) = r;
2729 PUT_CODE (reg, MEM);
2730 PUT_MODE (reg, DECL_MODE (decl));
2731 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2732 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2734 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2735 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2737 return reg;
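/* Shape of the result (pseudo number 117 invented, and DECL_MODE
   assumed to be SImode as well): a register (reg:SI 99) holding DECL
   is overwritten in place to become

	(mem:SI (addressof:Pmode (reg:SI 117) 99)),

   with the original register number 99 kept as ADDRESSOF_REGNO in case
   put_addressof_into_stack is needed later.  */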
2740 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2742 void
2743 flush_addressof (decl)
2744 tree decl;
2746 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2747 && DECL_RTL (decl) != 0
2748 && GET_CODE (DECL_RTL (decl)) == MEM
2749 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2750 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2751 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2754 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2756 static void
2757 put_addressof_into_stack (r)
2758 rtx r;
2760 tree decl = ADDRESSOF_DECL (r);
2761 rtx reg = XEXP (r, 0);
2763 if (GET_CODE (reg) != REG)
2764 abort ();
2766 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2767 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2768 ADDRESSOF_REGNO (r),
2769 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2772 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2773 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2774 the stack. */
2776 static void
2777 purge_addressof_1 (loc, insn, force)
2778 rtx *loc;
2779 rtx insn;
2780 int force;
2782 rtx x;
2783 RTX_CODE code;
2784 int i, j;
2785 char *fmt;
2787 /* Re-start here to avoid recursion in common cases. */
2788 restart:
2790 x = *loc;
2791 if (x == 0)
2792 return;
2794 code = GET_CODE (x);
2796 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2798 rtx insns;
2799 /* We must create a copy of the rtx because it was created by
2800 overwriting a REG rtx which is always shared. */
2801 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2803 if (validate_change (insn, loc, sub, 0))
2804 return;
2806 start_sequence ();
2807 if (! validate_change (insn, loc,
2808 force_operand (sub, NULL_RTX),
2809 0))
2810 abort ();
2812 insns = get_insns ();
2813 end_sequence ();
2814 emit_insns_before (insns, insn);
2815 return;
2817 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2819 rtx sub = XEXP (XEXP (x, 0), 0);
2821 if (GET_CODE (sub) == MEM)
2822 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2824 if (GET_CODE (sub) == REG
2825 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2827 put_addressof_into_stack (XEXP (x, 0));
2828 return;
2830 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2832 if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
2834 rtx sub2 = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
2835 if (validate_change (insn, loc, sub2, 0))
2836 goto restart;
2839 else if (validate_change (insn, loc, sub, 0))
2840 goto restart;
2841 /* else give up and put it into the stack */
2843 else if (code == ADDRESSOF)
2845 put_addressof_into_stack (x);
2846 return;
2849 /* Scan all subexpressions. */
2850 fmt = GET_RTX_FORMAT (code);
2851 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2853 if (*fmt == 'e')
2854 purge_addressof_1 (&XEXP (x, i), insn, force);
2855 else if (*fmt == 'E')
2856 for (j = 0; j < XVECLEN (x, i); j++)
2857 purge_addressof_1 (&XVECEXP (x, i, j), insn, force);
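/* To illustrate the MEM case above (register number invented): a
   reference roughly of the form (mem:SI (addressof (reg:SI 99) ...))
   whose address never escaped collapses back to the plain (reg:SI 99),
   or to a SUBREG of it when the modes disagree; only volatile or
   BLKmode references force the register onto the stack.  */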
2861 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2862 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2863 stack. */
2865 void
2866 purge_addressof (insns)
2867 rtx insns;
2869 rtx insn;
2870 for (insn = insns; insn; insn = NEXT_INSN (insn))
2871 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2872 || GET_CODE (insn) == CALL_INSN)
2874 purge_addressof_1 (&PATTERN (insn), insn,
2875 asm_noperands (PATTERN (insn)) > 0);
2876 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0);
2880 /* Pass through the INSNS of function FNDECL and convert virtual register
2881 references to hard register references. */
2883 void
2884 instantiate_virtual_regs (fndecl, insns)
2885 tree fndecl;
2886 rtx insns;
2888 rtx insn;
2889 int i;
2891 /* Compute the offsets to use for this function. */
2892 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2893 var_offset = STARTING_FRAME_OFFSET;
2894 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2895 out_arg_offset = STACK_POINTER_OFFSET;
2897 /* Scan all variables and parameters of this function. For each that is
2898 in memory, instantiate all virtual registers if the result is a valid
2899 address. If not, we do it later. That will handle most uses of virtual
2900 regs on many machines. */
2901 instantiate_decls (fndecl, 1);
2903 /* Initialize recognition, indicating that volatile is OK. */
2904 init_recog ();
2906 /* Scan through all the insns, instantiating every virtual register still
2907 present. */
2908 for (insn = insns; insn; insn = NEXT_INSN (insn))
2909 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2910 || GET_CODE (insn) == CALL_INSN)
2912 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2913 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2916 /* Instantiate the stack slots for the parm registers, for later use in
2917 addressof elimination. */
2918 for (i = 0; i < max_parm_reg; ++i)
2919 if (parm_reg_stack_loc[i])
2920 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
2922 /* Now instantiate the remaining register equivalences for debugging info.
2923 These will not be valid addresses. */
2924 instantiate_decls (fndecl, 0);
2926 /* Indicate that, from now on, assign_stack_local should use
2927 frame_pointer_rtx. */
2928 virtuals_instantiated = 1;
2931 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2932 all virtual registers in their DECL_RTL's.
2934 If VALID_ONLY, do this only if the resulting address is still valid.
2935 Otherwise, always do it. */
2937 static void
2938 instantiate_decls (fndecl, valid_only)
2939 tree fndecl;
2940 int valid_only;
2942 tree decl;
2944 if (DECL_SAVED_INSNS (fndecl))
2945 /* When compiling an inline function, the obstack used for
2946 rtl allocation is the maybepermanent_obstack. Calling
2947 `resume_temporary_allocation' switches us back to that
2948 obstack while we process this function's parameters. */
2949 resume_temporary_allocation ();
2951 /* Process all parameters of the function. */
2952 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2954 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
2956 instantiate_decl (DECL_RTL (decl), size, valid_only);
2958 /* If the parameter was promoted, then the incoming RTL mode may be
2959 larger than the declared type size. We must use the larger of
2960 the two sizes. */
2961 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
2962 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
2965 /* Now process all variables defined in the function or its subblocks. */
2966 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2968 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2970 /* Save all rtl allocated for this function by raising the
2971 high-water mark on the maybepermanent_obstack. */
2972 preserve_data ();
2973 /* All further rtl allocation is now done in the current_obstack. */
2974 rtl_in_current_obstack ();
2978 /* Subroutine of instantiate_decls: Process all decls in the given
2979 BLOCK node and all its subblocks. */
2981 static void
2982 instantiate_decls_1 (let, valid_only)
2983 tree let;
2984 int valid_only;
2986 tree t;
2988 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2989 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2990 valid_only);
2992 /* Process all subblocks. */
2993 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2994 instantiate_decls_1 (t, valid_only);
2997 /* Subroutine of the preceding procedures: Given RTL representing a
2998 decl and the size of the object, do any instantiation required.
3000 If VALID_ONLY is non-zero, it means that the RTL should only be
3001 changed if the new address is valid. */
3003 static void
3004 instantiate_decl (x, size, valid_only)
3005 rtx x;
3006 int size;
3007 int valid_only;
3009 enum machine_mode mode;
3010 rtx addr;
3012 /* If this is not a MEM, no need to do anything. Similarly if the
3013 address is a constant or a register that is not a virtual register. */
3015 if (x == 0 || GET_CODE (x) != MEM)
3016 return;
3018 addr = XEXP (x, 0);
3019 if (CONSTANT_P (addr)
3020 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3021 || (GET_CODE (addr) == REG
3022 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3023 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3024 return;
3026 /* If we should only do this if the address is valid, copy the address.
3027 We need to do this so we can undo any changes that might make the
3028 address invalid. This copy is unfortunate, but probably can't be
3029 avoided. */
3031 if (valid_only)
3032 addr = copy_rtx (addr);
3034 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3036 if (valid_only)
3038 /* Now verify that the resulting address is valid for every integer or
3039 floating-point mode up to and including SIZE bytes long. We do this
3040 since the object might be accessed in any mode and frame addresses
3041 are shared. */
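/* E.g., with typical mode sizes, for an 8-byte object the loops below
   try QImode through DImode and then SFmode through DFmode addresses,
   since the slot may later be accessed in any of those modes.  */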
3043 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3044 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3045 mode = GET_MODE_WIDER_MODE (mode))
3046 if (! memory_address_p (mode, addr))
3047 return;
3049 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3050 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3051 mode = GET_MODE_WIDER_MODE (mode))
3052 if (! memory_address_p (mode, addr))
3053 return;
3056 /* Put back the address now that we have updated it and we either know
3057 it is valid or we don't care whether it is valid. */
3059 XEXP (x, 0) = addr;
3062 /* Given a pointer to a piece of rtx and an optional pointer to the
3063 containing object, instantiate any virtual registers present in it.
3065 If EXTRA_INSNS, we always do the replacement and generate
3066 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3067 is not valid.
3069 Return 1 if we either had nothing to do or if we were able to do the
3070 needed replacement. Return 0 otherwise; we only return zero if
3071 EXTRA_INSNS is zero.
3073 We first try some simple transformations to avoid the creation of extra
3074 pseudos. */
3076 static int
3077 instantiate_virtual_regs_1 (loc, object, extra_insns)
3078 rtx *loc;
3079 rtx object;
3080 int extra_insns;
3082 rtx x;
3083 RTX_CODE code;
3084 rtx new = 0;
3085 HOST_WIDE_INT offset;
3086 rtx temp;
3087 rtx seq;
3088 int i, j;
3089 char *fmt;
3091 /* Re-start here to avoid recursion in common cases. */
3092 restart:
3094 x = *loc;
3095 if (x == 0)
3096 return 1;
3098 code = GET_CODE (x);
3100 /* Check for some special cases. */
3101 switch (code)
3103 case CONST_INT:
3104 case CONST_DOUBLE:
3105 case CONST:
3106 case SYMBOL_REF:
3107 case CODE_LABEL:
3108 case PC:
3109 case CC0:
3110 case ASM_INPUT:
3111 case ADDR_VEC:
3112 case ADDR_DIFF_VEC:
3113 case RETURN:
3114 return 1;
3116 case SET:
3117 /* We are allowed to set the virtual registers. This means that
3118 the actual register should receive the source minus the
3119 appropriate offset. This is used, for example, in the handling
3120 of non-local gotos. */
3121 if (SET_DEST (x) == virtual_incoming_args_rtx)
3122 new = arg_pointer_rtx, offset = - in_arg_offset;
3123 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3124 new = frame_pointer_rtx, offset = - var_offset;
3125 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3126 new = stack_pointer_rtx, offset = - dynamic_offset;
3127 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3128 new = stack_pointer_rtx, offset = - out_arg_offset;
3130 if (new)
3132 /* The only valid sources here are PLUS or REG. Just do
3133 the simplest possible thing to handle them. */
3134 if (GET_CODE (SET_SRC (x)) != REG
3135 && GET_CODE (SET_SRC (x)) != PLUS)
3136 abort ();
3138 start_sequence ();
3139 if (GET_CODE (SET_SRC (x)) != REG)
3140 temp = force_operand (SET_SRC (x), NULL_RTX);
3141 else
3142 temp = SET_SRC (x);
3143 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3144 seq = get_insns ();
3145 end_sequence ();
3147 emit_insns_before (seq, object);
3148 SET_DEST (x) = new;
3150 if (! validate_change (object, &SET_SRC (x), temp, 0)
3151 || ! extra_insns)
3152 abort ();
3154 return 1;
3157 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3158 loc = &SET_SRC (x);
3159 goto restart;
3161 case PLUS:
3162 /* Handle special case of virtual register plus constant. */
3163 if (CONSTANT_P (XEXP (x, 1)))
3165 rtx old, new_offset;
3167 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3168 if (GET_CODE (XEXP (x, 0)) == PLUS)
3170 rtx inner = XEXP (XEXP (x, 0), 0);
3172 if (inner == virtual_incoming_args_rtx)
3173 new = arg_pointer_rtx, offset = in_arg_offset;
3174 else if (inner == virtual_stack_vars_rtx)
3175 new = frame_pointer_rtx, offset = var_offset;
3176 else if (inner == virtual_stack_dynamic_rtx)
3177 new = stack_pointer_rtx, offset = dynamic_offset;
3178 else if (inner == virtual_outgoing_args_rtx)
3179 new = stack_pointer_rtx, offset = out_arg_offset;
3180 else
3182 loc = &XEXP (x, 0);
3183 goto restart;
3186 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3187 extra_insns);
3188 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3191 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3192 new = arg_pointer_rtx, offset = in_arg_offset;
3193 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3194 new = frame_pointer_rtx, offset = var_offset;
3195 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3196 new = stack_pointer_rtx, offset = dynamic_offset;
3197 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3198 new = stack_pointer_rtx, offset = out_arg_offset;
3199 else
3201 /* We know the second operand is a constant. Unless the
3202 first operand is a REG (which has already been checked),
3203 it needs to be checked. */
3204 if (GET_CODE (XEXP (x, 0)) != REG)
3206 loc = &XEXP (x, 0);
3207 goto restart;
3209 return 1;
3212 new_offset = plus_constant (XEXP (x, 1), offset);
3214 /* If the new constant is zero, try to replace the sum with just
3215 the register. */
3216 if (new_offset == const0_rtx
3217 && validate_change (object, loc, new, 0))
3218 return 1;
3220 /* Next try to replace the register and new offset.
3221 There are two changes to validate here and we can't assume that
3222 in the case of old offset equals new just changing the register
3223 will yield a valid insn. In the interests of a little efficiency,
3224 however, we only call validate_change once (we don't queue up the
3225 changes and then call apply_change_group). */
3227 old = XEXP (x, 0);
3228 if (offset == 0
3229 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3230 : (XEXP (x, 0) = new,
3231 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3233 if (! extra_insns)
3235 XEXP (x, 0) = old;
3236 return 0;
3239 /* Otherwise copy the new constant into a register and replace
3240 constant with that register. */
3241 temp = gen_reg_rtx (Pmode);
3242 XEXP (x, 0) = new;
3243 if (validate_change (object, &XEXP (x, 1), temp, 0))
3244 emit_insn_before (gen_move_insn (temp, new_offset), object);
3245 else
3247 /* If that didn't work, replace this expression with a
3248 register containing the sum. */
3250 XEXP (x, 0) = old;
3251 new = gen_rtx_PLUS (Pmode, new, new_offset);
3253 start_sequence ();
3254 temp = force_operand (new, NULL_RTX);
3255 seq = get_insns ();
3256 end_sequence ();
3258 emit_insns_before (seq, object);
3259 if (! validate_change (object, loc, temp, 0)
3260 && ! validate_replace_rtx (x, temp, object))
3261 abort ();
3265 return 1;
3268 /* Fall through to generic two-operand expression case. */
3269 case EXPR_LIST:
3270 case CALL:
3271 case COMPARE:
3272 case MINUS:
3273 case MULT:
3274 case DIV: case UDIV:
3275 case MOD: case UMOD:
3276 case AND: case IOR: case XOR:
3277 case ROTATERT: case ROTATE:
3278 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3279 case NE: case EQ:
3280 case GE: case GT: case GEU: case GTU:
3281 case LE: case LT: case LEU: case LTU:
3282 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3283 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3284 loc = &XEXP (x, 0);
3285 goto restart;
3287 case MEM:
3288 /* Most cases of MEM that convert to valid addresses have already been
3289 handled by our scan of decls. The only special handling we
3290 need here is to make a copy of the rtx to ensure it isn't being
3291 shared if we have to change it to a pseudo.
3293 If the rtx is a simple reference to an address via a virtual register,
3294 it can potentially be shared. In such cases, first try to make it
3295 a valid address, which can also be shared. Otherwise, copy it and
3296 proceed normally.
3298 First check for common cases that need no processing. These are
3299 usually due to instantiation already being done on a previous instance
3300 of a shared rtx. */
3302 temp = XEXP (x, 0);
3303 if (CONSTANT_ADDRESS_P (temp)
3304 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3305 || temp == arg_pointer_rtx
3306 #endif
3307 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3308 || temp == hard_frame_pointer_rtx
3309 #endif
3310 || temp == frame_pointer_rtx)
3311 return 1;
3313 if (GET_CODE (temp) == PLUS
3314 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3315 && (XEXP (temp, 0) == frame_pointer_rtx
3316 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3317 || XEXP (temp, 0) == hard_frame_pointer_rtx
3318 #endif
3319 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3320 || XEXP (temp, 0) == arg_pointer_rtx
3321 #endif
3322 ))
3323 return 1;
3325 if (temp == virtual_stack_vars_rtx
3326 || temp == virtual_incoming_args_rtx
3327 || (GET_CODE (temp) == PLUS
3328 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3329 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3330 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3332 /* This MEM may be shared. If the substitution can be done without
3333 the need to generate new pseudos, we want to do it in place
3334 so all copies of the shared rtx benefit. The call below will
3335 only make substitutions if the resulting address is still
3336 valid.
3338 Note that we cannot pass X as the object in the recursive call
3339 since the insn being processed may not allow all valid
3340 addresses. However, if we were not passed an object, we can
3341 only modify X without copying it if X will have a valid
3342 address.
3344 ??? Also note that this can still lose if OBJECT is an insn that
3345 has fewer restrictions on an address than some other insn.
3346 In that case, we will modify the shared address. This case
3347 doesn't seem very likely, though. One case where this could
3348 happen is in the case of a USE or CLOBBER reference, but we
3349 take care of that below. */
3351 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3352 object ? object : x, 0))
3353 return 1;
3355 /* Otherwise make a copy and process that copy. We copy the entire
3356 RTL expression since it might be a PLUS which could also be
3357 shared. */
3358 *loc = x = copy_rtx (x);
3361 /* Fall through to generic unary operation case. */
3362 case SUBREG:
3363 case STRICT_LOW_PART:
3364 case NEG: case NOT:
3365 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3366 case SIGN_EXTEND: case ZERO_EXTEND:
3367 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3368 case FLOAT: case FIX:
3369 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3370 case ABS:
3371 case SQRT:
3372 case FFS:
3373 These cases either have just one operand or we know that we need not
3374 check the rest of the operands. */
3375 loc = &XEXP (x, 0);
3376 goto restart;
3378 case USE:
3379 case CLOBBER:
3380 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3381 go ahead and make the invalid one, but do it to a copy. For a REG,
3382 just make the recursive call, since there's no chance of a problem. */
3384 if ((GET_CODE (XEXP (x, 0)) == MEM
3385 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3386 0))
3387 || (GET_CODE (XEXP (x, 0)) == REG
3388 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3389 return 1;
3391 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3392 loc = &XEXP (x, 0);
3393 goto restart;
3395 case REG:
3396 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3397 in front of this insn and substitute the temporary. */
3398 if (x == virtual_incoming_args_rtx)
3399 new = arg_pointer_rtx, offset = in_arg_offset;
3400 else if (x == virtual_stack_vars_rtx)
3401 new = frame_pointer_rtx, offset = var_offset;
3402 else if (x == virtual_stack_dynamic_rtx)
3403 new = stack_pointer_rtx, offset = dynamic_offset;
3404 else if (x == virtual_outgoing_args_rtx)
3405 new = stack_pointer_rtx, offset = out_arg_offset;
3407 if (new)
3409 temp = plus_constant (new, offset);
3410 if (!validate_change (object, loc, temp, 0))
3412 if (! extra_insns)
3413 return 0;
3415 start_sequence ();
3416 temp = force_operand (temp, NULL_RTX);
3417 seq = get_insns ();
3418 end_sequence ();
3420 emit_insns_before (seq, object);
3421 if (! validate_change (object, loc, temp, 0)
3422 && ! validate_replace_rtx (x, temp, object))
3423 abort ();
3427 return 1;
3429 case ADDRESSOF:
3430 if (GET_CODE (XEXP (x, 0)) == REG)
3431 return 1;
3433 else if (GET_CODE (XEXP (x, 0)) == MEM)
3435 /* If we have a (addressof (mem ..)), do any instantiation inside
3436 since we know we'll be making the inside valid when we finally
3437 remove the ADDRESSOF. */
3438 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3439 return 1;
3441 break;
3443 default:
3444 break;
3447 /* Scan all subexpressions. */
3448 fmt = GET_RTX_FORMAT (code);
3449 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3450 if (*fmt == 'e')
3452 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3453 return 0;
3455 else if (*fmt == 'E')
3456 for (j = 0; j < XVECLEN (x, i); j++)
3457 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3458 extra_insns))
3459 return 0;
3461 return 1;
3464 /* Optimization: assuming this function does not receive nonlocal gotos,
3465 delete the handlers for such, as well as the insns to establish
3466 and disestablish them. */
3468 static void
3469 delete_handlers ()
3471 rtx insn;
3472 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3474 /* Delete the handler by turning off the flag that would
3475 prevent jump_optimize from deleting it.
3476 Also permit deletion of the nonlocal labels themselves
3477 if nothing local refers to them. */
3478 if (GET_CODE (insn) == CODE_LABEL)
3480 tree t, last_t;
3482 LABEL_PRESERVE_P (insn) = 0;
3484 /* Remove it from the nonlocal_label list, to avoid confusing
3485 flow. */
3486 for (t = nonlocal_labels, last_t = 0; t;
3487 last_t = t, t = TREE_CHAIN (t))
3488 if (DECL_RTL (TREE_VALUE (t)) == insn)
3489 break;
3490 if (t)
3492 if (! last_t)
3493 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3494 else
3495 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3498 if (GET_CODE (insn) == INSN
3499 && ((nonlocal_goto_handler_slot != 0
3500 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3501 || (nonlocal_goto_stack_level != 0
3502 && reg_mentioned_p (nonlocal_goto_stack_level,
3503 PATTERN (insn)))))
3504 delete_insn (insn);
3508 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3509 of the current function. */
3511 rtx
3512 nonlocal_label_rtx_list ()
3514 tree t;
3515 rtx x = 0;
3517 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3518 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3520 return x;
3523 /* Output a USE for any register use in RTL.
3524 This is used with -noreg to mark the extent of lifespan
3525 of any registers used in a user-visible variable's DECL_RTL. */
3527 void
3528 use_variable (rtl)
3529 rtx rtl;
3531 if (GET_CODE (rtl) == REG)
3532 /* This is a register variable. */
3533 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3534 else if (GET_CODE (rtl) == MEM
3535 && GET_CODE (XEXP (rtl, 0)) == REG
3536 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3537 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3538 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3539 /* This is a variable-sized structure. */
3540 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3543 /* Like use_variable except that it outputs the USEs after INSN
3544 instead of at the end of the insn-chain. */
3546 void
3547 use_variable_after (rtl, insn)
3548 rtx rtl, insn;
3550 if (GET_CODE (rtl) == REG)
3551 /* This is a register variable. */
3552 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3553 else if (GET_CODE (rtl) == MEM
3554 && GET_CODE (XEXP (rtl, 0)) == REG
3555 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3556 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3557 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3558 /* This is a variable-sized structure. */
3559 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3562 int
3563 max_parm_reg_num ()
3565 return max_parm_reg;
3568 /* Return the first insn following those generated by `assign_parms'. */
3570 rtx
3571 get_first_nonparm_insn ()
3573 if (last_parm_insn)
3574 return NEXT_INSN (last_parm_insn);
3575 return get_insns ();
3578 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3579 Crash if there is none. */
3581 rtx
3582 get_first_block_beg ()
3584 register rtx searcher;
3585 register rtx insn = get_first_nonparm_insn ();
3587 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3588 if (GET_CODE (searcher) == NOTE
3589 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3590 return searcher;
3592 abort (); /* Invalid call to this function. (See comments above.) */
3593 return NULL_RTX;
3596 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3597 This means a type for which function calls must pass an address to the
3598 function or get an address back from the function.
3599 EXP may be a type node or an expression (whose type is tested). */
3601 int
3602 aggregate_value_p (exp)
3603 tree exp;
3605 int i, regno, nregs;
3606 rtx reg;
3607 tree type;
3608 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3609 type = exp;
3610 else
3611 type = TREE_TYPE (exp);
3613 if (RETURN_IN_MEMORY (type))
3614 return 1;
3615 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3616 and thus can't be returned in registers. */
3617 if (TREE_ADDRESSABLE (type))
3618 return 1;
3619 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3620 return 1;
3621 /* Make sure we have suitable call-clobbered regs to return
3622 the value in; if not, we must return it in memory. */
3623 reg = hard_function_value (type, 0);
3625 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3626 it is OK. */
3627 if (GET_CODE (reg) != REG)
3628 return 0;
3630 regno = REGNO (reg);
3631 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3632 for (i = 0; i < nregs; i++)
3633 if (! call_used_regs[regno + i])
3634 return 1;
3635 return 0;
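/* Illustrative sketch, not compiled in: how a caller typically uses
   aggregate_value_p to pick a return convention.  Only the printf
   text is invented; aggregate_value_p is the function above.  */
#if 0
static void
example_return_convention (fndecl)
     tree fndecl;
{
  if (aggregate_value_p (DECL_RESULT (fndecl)))
    /* Caller passes the address of a temporary and the function
       stores the result through that pointer.  */
    printf ("returned in memory\n");
  else
    /* Value comes back in call-clobbered hard registers.  */
    printf ("returned in registers\n");
}
#endif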
3638 /* Assign RTL expressions to the function's parameters.
3639 This may involve copying them into registers and using
3640 those registers as the RTL for them.
3642 If SECOND_TIME is non-zero it means that this function is being
3643 called a second time. This is done by integrate.c when a function's
3644 compilation is deferred. We need to come back here in case the
3645 FUNCTION_ARG macro computes items needed for the rest of the compilation
3646 (such as changing which registers are fixed or caller-saved). But suppress
3647 writing any insns or setting DECL_RTL of anything in this case. */
3649 void
3650 assign_parms (fndecl, second_time)
3651 tree fndecl;
3652 int second_time;
3654 register tree parm;
3655 register rtx entry_parm = 0;
3656 register rtx stack_parm = 0;
3657 CUMULATIVE_ARGS args_so_far;
3658 enum machine_mode promoted_mode, passed_mode;
3659 enum machine_mode nominal_mode, promoted_nominal_mode;
3660 int unsignedp;
3661 /* Total space needed so far for args on the stack,
3662 given as a constant and a tree-expression. */
3663 struct args_size stack_args_size;
3664 tree fntype = TREE_TYPE (fndecl);
3665 tree fnargs = DECL_ARGUMENTS (fndecl);
3666 /* This is used for the arg pointer when referring to stack args. */
3667 rtx internal_arg_pointer;
3668 /* This is a dummy PARM_DECL that we used for the function result if
3669 the function returns a structure. */
3670 tree function_result_decl = 0;
3671 int varargs_setup = 0;
3672 rtx conversion_insns = 0;
3674 /* Nonzero if the last arg is named `__builtin_va_alist',
3675 which is used on some machines for old-fashioned non-ANSI varargs.h;
3676 this should be stuck onto the stack as if it had arrived there. */
3677 int hide_last_arg
3678 = (current_function_varargs
3679 && fnargs
3680 && (parm = tree_last (fnargs)) != 0
3681 && DECL_NAME (parm)
3682 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3683 "__builtin_va_alist")));
3685 /* Nonzero if function takes extra anonymous args.
3686 This means the last named arg must be on the stack
3687 right before the anonymous ones. */
3688 int stdarg
3689 = (TYPE_ARG_TYPES (fntype) != 0
3690 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3691 != void_type_node));
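/* Example (illustrative): for `int f (int x, ...)', TYPE_ARG_TYPES
   ends with `int' rather than void_type_node, so STDARG is 1; for
   `int f (int x)' the list is terminated by void_type_node and
   STDARG is 0.  */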
3693 current_function_stdarg = stdarg;
3695 /* If the reg that the virtual arg pointer will be translated into is
3696 not a fixed reg or is the stack pointer, make a copy of the virtual
3697 arg pointer, and address parms via the copy. The frame pointer is
3698 considered fixed even though it is not marked as such.
3700 The second time through, simply use ap to avoid generating rtx. */
3702 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3703 || ! (fixed_regs[ARG_POINTER_REGNUM]
3704 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3705 && ! second_time)
3706 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3707 else
3708 internal_arg_pointer = virtual_incoming_args_rtx;
3709 current_function_internal_arg_pointer = internal_arg_pointer;
3711 stack_args_size.constant = 0;
3712 stack_args_size.var = 0;
3714 /* If struct value address is treated as the first argument, make it so. */
3715 if (aggregate_value_p (DECL_RESULT (fndecl))
3716 && ! current_function_returns_pcc_struct
3717 && struct_value_incoming_rtx == 0)
3719 tree type = build_pointer_type (TREE_TYPE (fntype));
3721 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3723 DECL_ARG_TYPE (function_result_decl) = type;
3724 TREE_CHAIN (function_result_decl) = fnargs;
3725 fnargs = function_result_decl;
3728 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3729 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3730 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3732 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3733 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3734 #else
3735 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3736 #endif
3738 /* We haven't yet found an argument that we must push and pretend the
3739 caller did. */
3740 current_function_pretend_args_size = 0;
3742 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3744 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3745 struct args_size stack_offset;
3746 struct args_size arg_size;
3747 int passed_pointer = 0;
3748 int did_conversion = 0;
3749 tree passed_type = DECL_ARG_TYPE (parm);
3750 tree nominal_type = TREE_TYPE (parm);
3752 /* Set LAST_NAMED if this is last named arg before some
3753 anonymous args. */
3754 int last_named = ((TREE_CHAIN (parm) == 0
3755 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3756 && (stdarg || current_function_varargs));
3757 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3758 most machines, if this is a varargs/stdarg function, then we treat
3759 the last named arg as if it were anonymous too. */
3760 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3762 if (TREE_TYPE (parm) == error_mark_node
3763 /* This can happen after weird syntax errors
3764 or if an enum type is defined among the parms. */
3765 || TREE_CODE (parm) != PARM_DECL
3766 || passed_type == NULL)
3768 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3769 = gen_rtx_MEM (BLKmode, const0_rtx);
3770 TREE_USED (parm) = 1;
3771 continue;
3774 /* For a varargs.h function, save info about regs and stack space
3775 used by the individual args, not including the va_alist arg. */
3776 if (hide_last_arg && last_named)
3777 current_function_args_info = args_so_far;
3779 /* Find mode of arg as it is passed, and mode of arg
3780 as it should be during execution of this function. */
3781 passed_mode = TYPE_MODE (passed_type);
3782 nominal_mode = TYPE_MODE (nominal_type);
3784 /* If the parm's mode is VOID, its value doesn't matter; avoid the
3785 usual things like emit_move_insn, which could crash. */
3786 if (nominal_mode == VOIDmode)
3788 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3789 continue;
3792 /* If the parm is to be passed as a transparent union, use the
3793 type of the first field for the tests below. We have already
3794 verified that the modes are the same. */
3795 if (DECL_TRANSPARENT_UNION (parm)
3796 || TYPE_TRANSPARENT_UNION (passed_type))
3797 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3799 /* See if this arg was passed by invisible reference. It is if
3800 it is an object whose size depends on the contents of the
3801 object itself or if the machine requires these objects be passed
3802 that way. */
3804 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3805 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3806 || TREE_ADDRESSABLE (passed_type)
3807 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3808 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3809 passed_type, named_arg)
3810 #endif
3811 	  )
3812 	{
3813 passed_type = nominal_type = build_pointer_type (passed_type);
3814 passed_pointer = 1;
3815 passed_mode = nominal_mode = Pmode;
3818 promoted_mode = passed_mode;
3820 #ifdef PROMOTE_FUNCTION_ARGS
3821 /* Compute the mode in which the arg is actually extended to. */
3822 unsignedp = TREE_UNSIGNED (passed_type);
3823 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3824 #endif
3826 /* Let machine desc say which reg (if any) the parm arrives in.
3827 0 means it arrives on the stack. */
3828 #ifdef FUNCTION_INCOMING_ARG
3829 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3830 passed_type, named_arg);
3831 #else
3832 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3833 passed_type, named_arg);
3834 #endif
3836 if (entry_parm == 0)
3837 promoted_mode = passed_mode;
3839 #ifdef SETUP_INCOMING_VARARGS
3840 /* If this is the last named parameter, do any required setup for
3841 varargs or stdargs. We need to know about the case of this being an
3842 addressable type, in which case we skip the registers it
3843 would have arrived in.
3845 For stdargs, LAST_NAMED will be set for two parameters, the one that
3846 is actually the last named, and the dummy parameter. We only
3847 want to do this action once.
3849 Also, indicate when RTL generation is to be suppressed. */
3850 if (last_named && !varargs_setup)
3852 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3853 current_function_pretend_args_size,
3854 second_time);
3855 varargs_setup = 1;
3857 #endif
3859 /* Determine parm's home in the stack,
3860 in case it arrives in the stack or we should pretend it did.
3862 Compute the stack position and rtx where the argument arrives
3863 and its size.
3865 There is one complexity here: If this was a parameter that would
3866 have been passed in registers, but wasn't only because it is
3867 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3868 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3869 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3870 0 as it was the previous time. */
3872 locate_and_pad_parm (promoted_mode, passed_type,
3873 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3874 			   1,
3875 #else
3876 #ifdef FUNCTION_INCOMING_ARG
3877 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3878 passed_type,
3879 (named_arg
3880 || varargs_setup)) != 0,
3881 #else
3882 FUNCTION_ARG (args_so_far, promoted_mode,
3883 passed_type,
3884 named_arg || varargs_setup) != 0,
3885 #endif
3886 #endif
3887 fndecl, &stack_args_size, &stack_offset, &arg_size);
3889 if (! second_time)
3891 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3893 if (offset_rtx == const0_rtx)
3894 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
3895 else
3896 stack_parm = gen_rtx_MEM (promoted_mode,
3897 gen_rtx_PLUS (Pmode,
3898 internal_arg_pointer,
3899 offset_rtx));
3901 /* If this is a memory ref that contains aggregate components,
3902 mark it as such for cse and loop optimize. Likewise if it
3903 is readonly. */
3904 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3905 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3908 /* If this parameter was passed both in registers and in the stack,
3909 use the copy on the stack. */
3910 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3911 entry_parm = 0;
3913 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3914 /* If this parm was passed part in regs and part in memory,
3915 pretend it arrived entirely in memory
3916 by pushing the register-part onto the stack.
3918 In the special case of a DImode or DFmode that is split,
3919 we could put it together in a pseudoreg directly,
3920 but for now that's not worth bothering with. */
3922 if (entry_parm)
3924 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3925 passed_type, named_arg);
3927 if (nregs > 0)
3929 current_function_pretend_args_size
3930 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3931 / (PARM_BOUNDARY / BITS_PER_UNIT)
3932 * (PARM_BOUNDARY / BITS_PER_UNIT));
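/* Worked example (illustrative): with UNITS_PER_WORD == 4 and
   PARM_BOUNDARY == 64 (8 bytes), nregs == 3 gives
   ((12 + 7) / 8) * 8 == 16 bytes of pretend arg space: the 12
   bytes of register-passed data rounded up to the parm boundary.  */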
3934 if (! second_time)
3936 /* Handle calls that pass values in multiple non-contiguous
3937 locations. The Irix 6 ABI has examples of this. */
3938 if (GET_CODE (entry_parm) == PARALLEL)
3939 emit_group_store (validize_mem (stack_parm),
3940 entry_parm);
3941 else
3942 move_block_from_reg (REGNO (entry_parm),
3943 validize_mem (stack_parm), nregs,
3944 int_size_in_bytes (TREE_TYPE (parm)));
3946 entry_parm = stack_parm;
3949 #endif
3951 /* If we didn't decide this parm came in a register,
3952 by default it came on the stack. */
3953 if (entry_parm == 0)
3954 entry_parm = stack_parm;
3956 /* Record permanently how this parm was passed. */
3957 if (! second_time)
3958 DECL_INCOMING_RTL (parm) = entry_parm;
3960 /* If there is actually space on the stack for this parm,
3961 count it in stack_args_size; otherwise set stack_parm to 0
3962 to indicate there is no preallocated stack slot for the parm. */
3964 if (entry_parm == stack_parm
3965 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3966 /* On some machines, even if a parm value arrives in a register
3967 there is still an (uninitialized) stack slot allocated for it.
3969 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3970 whether this parameter already has a stack slot allocated,
3971 because an arg block exists only if current_function_args_size
3972 is larger than some threshold, and we haven't calculated that
3973 yet. So, for now, we just assume that stack slots never exist
3974 in this case. */
3975 || REG_PARM_STACK_SPACE (fndecl) > 0
3976 #endif
3977 	   )
3978 	{
3979 stack_args_size.constant += arg_size.constant;
3980 if (arg_size.var)
3981 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3983 else
3984 /* No stack slot was pushed for this parm. */
3985 stack_parm = 0;
3987 /* Update info on where next arg arrives in registers. */
3989 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3990 passed_type, named_arg);
3992 /* If this is our second time through, we are done with this parm. */
3993 if (second_time)
3994 continue;
3996 /* If we can't trust the parm stack slot to be aligned enough
3997 for its ultimate type, don't use that slot after entry.
3998 We'll make another stack slot, if we need one. */
4000 int thisparm_boundary
4001 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4003 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4004 stack_parm = 0;
4007 /* If parm was passed in memory, and we need to convert it on entry,
4008 don't store it back in that same slot. */
4009 if (entry_parm != 0
4010 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4011 stack_parm = 0;
4013 #if 0
4014 /* Now adjust STACK_PARM to the mode and precise location
4015 where this parameter should live during execution,
4016 if we discover that it must live in the stack during execution.
4017 To make debuggers happier on big-endian machines, we store
4018 the value in the last bytes of the space available. */
4020 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4021 && stack_parm != 0)
4023 rtx offset_rtx;
4025 if (BYTES_BIG_ENDIAN
4026 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4027 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4028 - GET_MODE_SIZE (nominal_mode));
4030 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4031 if (offset_rtx == const0_rtx)
4032 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4033 else
4034 stack_parm = gen_rtx_MEM (nominal_mode,
4035 gen_rtx_PLUS (Pmode,
4036 internal_arg_pointer,
4037 offset_rtx));
4039 /* If this is a memory ref that contains aggregate components,
4040 mark it as such for cse and loop optimize. */
4041 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4043 #endif /* 0 */
4045 #ifdef STACK_REGS
4046 /* We need this "use" info, because the gcc-register->stack-register
4047 converter in reg-stack.c needs to know which registers are active
4048 at the start of the function call. The actual parameter loading
4049 instructions are not always available at that point, since they
4050 might already have been optimized away. */
4052 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4053 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4054 #endif
4056 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4057 in the mode in which it arrives.
4058 STACK_PARM is an RTX for a stack slot where the parameter can live
4059 during the function (in case we want to put it there).
4060 STACK_PARM is 0 if no stack slot was pushed for it.
4062 Now output code if necessary to convert ENTRY_PARM to
4063 the type in which this function declares it,
4064 and store that result in an appropriate place,
4065 which may be a pseudo reg, may be STACK_PARM,
4066 or may be a local stack slot if STACK_PARM is 0.
4068 Set DECL_RTL to that place. */
4070 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4072 /* If a BLKmode arrives in registers, copy it to a stack slot.
4073 Handle calls that pass values in multiple non-contiguous
4074 locations. The Irix 6 ABI has examples of this. */
4075 if (GET_CODE (entry_parm) == REG
4076 || GET_CODE (entry_parm) == PARALLEL)
4078 int size_stored
4079 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4080 UNITS_PER_WORD);
4082 /* Note that we will be storing an integral number of words.
4083 So we have to be careful to ensure that we allocate an
4084 integral number of words. We do this below in the
4085 assign_stack_local if space was not allocated in the argument
4086 list. If it was, this will not work if PARM_BOUNDARY is not
4087 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4088 if it becomes a problem. */
4090 if (stack_parm == 0)
4092 stack_parm
4093 = assign_stack_local (GET_MODE (entry_parm),
4094 size_stored, 0);
4096 /* If this is a memory ref that contains aggregate
4097 components, mark it as such for cse and loop optimize. */
4098 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4101 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4102 abort ();
4104 if (TREE_READONLY (parm))
4105 RTX_UNCHANGING_P (stack_parm) = 1;
4107 /* Handle calls that pass values in multiple non-contiguous
4108 locations. The Irix 6 ABI has examples of this. */
4109 if (GET_CODE (entry_parm) == PARALLEL)
4110 emit_group_store (validize_mem (stack_parm), entry_parm);
4111 else
4112 move_block_from_reg (REGNO (entry_parm),
4113 validize_mem (stack_parm),
4114 size_stored / UNITS_PER_WORD,
4115 int_size_in_bytes (TREE_TYPE (parm)));
4117 DECL_RTL (parm) = stack_parm;
4119 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4120 && ! DECL_INLINE (fndecl))
4121 /* layout_decl may set this. */
4122 || TREE_ADDRESSABLE (parm)
4123 || TREE_SIDE_EFFECTS (parm)
4124 /* If -ffloat-store specified, don't put explicit
4125 float variables into registers. */
4126 || (flag_float_store
4127 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4128 /* Always assign pseudo to structure return or item passed
4129 by invisible reference. */
4130 || passed_pointer || parm == function_result_decl)
4132 /* Store the parm in a pseudoregister during the function, but we
4133 may need to do it in a wider mode. */
4135 register rtx parmreg;
4136 int regno, regnoi = 0, regnor = 0;
4138 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4140 promoted_nominal_mode
4141 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4143 parmreg = gen_reg_rtx (promoted_nominal_mode);
4144 mark_user_reg (parmreg);
4146 /* If this was an item that we received a pointer to, set DECL_RTL
4147 appropriately. */
4148 if (passed_pointer)
4150 DECL_RTL (parm)
4151 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4152 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4154 else
4155 DECL_RTL (parm) = parmreg;
4157 /* Copy the value into the register. */
4158 if (nominal_mode != passed_mode
4159 || promoted_nominal_mode != promoted_mode)
4161 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4162 mode, by the caller. We now have to convert it to
4163 NOMINAL_MODE, if different. However, PARMREG may be in
4164 a different mode than NOMINAL_MODE if it is being stored
4165 promoted.
4167 If ENTRY_PARM is a hard register, it might be in a register
4168 not valid for operating in its mode (e.g., an odd-numbered
4169 register for a DFmode). In that case, moves are the only
4170 thing valid, so we can't do a convert from there. This
4171 occurs when the calling sequence allows such misaligned
4172 usages.
4174 In addition, the conversion may involve a call, which could
4175 clobber parameters which haven't been copied to pseudo
4176 registers yet. Therefore, we must first copy the parm to
4177 a pseudo reg here, and save the conversion until after all
4178 parameters have been moved. */
4180 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4182 emit_move_insn (tempreg, validize_mem (entry_parm));
4184 push_to_sequence (conversion_insns);
4185 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4187 expand_assignment (parm,
4188 make_tree (nominal_type, tempreg), 0, 0);
4189 conversion_insns = get_insns ();
4190 did_conversion = 1;
4191 end_sequence ();
4193 else
4194 emit_move_insn (parmreg, validize_mem (entry_parm));
4196 /* If we were passed a pointer but the actual value
4197 can safely live in a register, put it in one. */
4198 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4199 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4200 && ! DECL_INLINE (fndecl))
4201 /* layout_decl may set this. */
4202 || TREE_ADDRESSABLE (parm)
4203 || TREE_SIDE_EFFECTS (parm)
4204 /* If -ffloat-store specified, don't put explicit
4205 float variables into registers. */
4206 || (flag_float_store
4207 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4209 /* We can't use nominal_mode, because it will have been set to
4210 Pmode above. We must use the actual mode of the parm. */
4211 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4212 mark_user_reg (parmreg);
4213 emit_move_insn (parmreg, DECL_RTL (parm));
4214 DECL_RTL (parm) = parmreg;
4215 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4216 now the parm. */
4217 stack_parm = 0;
4219 #ifdef FUNCTION_ARG_CALLEE_COPIES
4220 /* If we are passed an arg by reference and it is our responsibility
4221 to make a copy, do it now.
4222 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4223 original argument, so we must recreate them in the call to
4224 FUNCTION_ARG_CALLEE_COPIES. */
4225 /* ??? Later, add code to skip the copy when the argument
4226 is not modified. */
4228 else if (passed_pointer
4229 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4230 TYPE_MODE (DECL_ARG_TYPE (parm)),
4231 DECL_ARG_TYPE (parm),
4232 named_arg)
4233 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4235 rtx copy;
4236 tree type = DECL_ARG_TYPE (parm);
4238 /* This sequence may involve a library call perhaps clobbering
4239 registers that haven't been copied to pseudos yet. */
4241 push_to_sequence (conversion_insns);
4243 if (TYPE_SIZE (type) == 0
4244 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4245 /* This is a variable sized object. */
4246 copy = gen_rtx_MEM (BLKmode,
4247 allocate_dynamic_stack_space
4248 (expr_size (parm), NULL_RTX,
4249 TYPE_ALIGN (type)));
4250 else
4251 copy = assign_stack_temp (TYPE_MODE (type),
4252 int_size_in_bytes (type), 1);
4253 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4254 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4256 store_expr (parm, copy, 0);
4257 emit_move_insn (parmreg, XEXP (copy, 0));
4258 if (flag_check_memory_usage)
4259 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4260 XEXP (copy, 0), ptr_mode,
4261 GEN_INT (int_size_in_bytes (type)),
4262 TYPE_MODE (sizetype),
4263 GEN_INT (MEMORY_USE_RW),
4264 TYPE_MODE (integer_type_node));
4265 conversion_insns = get_insns ();
4266 did_conversion = 1;
4267 end_sequence ();
4269 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4271 /* In any case, record the parm's desired stack location
4272 in case we later discover it must live in the stack.
4274 If it is a COMPLEX value, store the stack location for both
4275 halves. */
4277 if (GET_CODE (parmreg) == CONCAT)
4278 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4279 else
4280 regno = REGNO (parmreg);
4282 if (regno >= max_parm_reg)
4284 rtx *new;
4285 int old_max_parm_reg = max_parm_reg;
4287 /* It's slow to expand this one register at a time,
4288 but it's also rare and we need max_parm_reg to be
4289 precisely correct. */
4290 max_parm_reg = regno + 1;
4291 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4292 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4293 old_max_parm_reg * sizeof (rtx));
4294 bzero ((char *) (new + old_max_parm_reg),
4295 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4296 parm_reg_stack_loc = new;
4299 if (GET_CODE (parmreg) == CONCAT)
4301 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4303 regnor = REGNO (gen_realpart (submode, parmreg));
4304 regnoi = REGNO (gen_imagpart (submode, parmreg));
4306 if (stack_parm != 0)
4308 parm_reg_stack_loc[regnor]
4309 = gen_realpart (submode, stack_parm);
4310 parm_reg_stack_loc[regnoi]
4311 = gen_imagpart (submode, stack_parm);
4313 else
4315 parm_reg_stack_loc[regnor] = 0;
4316 parm_reg_stack_loc[regnoi] = 0;
4319 else
4320 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4322 /* Mark the register as eliminable if we did no conversion
4323 and it was copied from memory at a fixed offset,
4324 and the arg pointer was not copied to a pseudo-reg.
4325 If the arg pointer is a pseudo reg or the offset formed
4326 an invalid address, such memory-equivalences
4327 as we make here would screw up life analysis for it. */
4328 if (nominal_mode == passed_mode
4329 && ! did_conversion
4330 && stack_parm != 0
4331 && GET_CODE (stack_parm) == MEM
4332 && stack_offset.var == 0
4333 && reg_mentioned_p (virtual_incoming_args_rtx,
4334 XEXP (stack_parm, 0)))
4336 rtx linsn = get_last_insn ();
4337 rtx sinsn, set;
4339 /* Mark complex types separately. */
4340 if (GET_CODE (parmreg) == CONCAT)
4341 /* Scan backwards for the set of the real and
4342 imaginary parts. */
4343 for (sinsn = linsn; sinsn != 0;
4344 sinsn = prev_nonnote_insn (sinsn))
4346 set = single_set (sinsn);
4347 if (set != 0
4348 && SET_DEST (set) == regno_reg_rtx [regnoi])
4349 REG_NOTES (sinsn)
4350 = gen_rtx_EXPR_LIST (REG_EQUIV,
4351 parm_reg_stack_loc[regnoi],
4352 REG_NOTES (sinsn));
4353 else if (set != 0
4354 && SET_DEST (set) == regno_reg_rtx [regnor])
4355 REG_NOTES (sinsn)
4356 = gen_rtx_EXPR_LIST (REG_EQUIV,
4357 parm_reg_stack_loc[regnor],
4358 REG_NOTES (sinsn));
4360 else if ((set = single_set (linsn)) != 0
4361 && SET_DEST (set) == parmreg)
4362 REG_NOTES (linsn)
4363 = gen_rtx_EXPR_LIST (REG_EQUIV,
4364 stack_parm, REG_NOTES (linsn));
4367 /* For pointer data type, suggest pointer register. */
4368 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4369 mark_reg_pointer (parmreg,
4370 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4371 / BITS_PER_UNIT));
4373 else
4375 /* Value must be stored in the stack slot STACK_PARM
4376 during function execution. */
4378 if (promoted_mode != nominal_mode)
4380 /* Conversion is required. */
4381 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4383 emit_move_insn (tempreg, validize_mem (entry_parm));
4385 push_to_sequence (conversion_insns);
4386 entry_parm = convert_to_mode (nominal_mode, tempreg,
4387 TREE_UNSIGNED (TREE_TYPE (parm)));
4388 if (stack_parm)
4390 /* ??? This may need a big-endian conversion on sparc64. */
4391 stack_parm = change_address (stack_parm, nominal_mode,
4392 NULL_RTX);
4394 conversion_insns = get_insns ();
4395 did_conversion = 1;
4396 end_sequence ();
4399 if (entry_parm != stack_parm)
4401 if (stack_parm == 0)
4403 stack_parm
4404 = assign_stack_local (GET_MODE (entry_parm),
4405 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4406 /* If this is a memory ref that contains aggregate components,
4407 mark it as such for cse and loop optimize. */
4408 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4411 if (promoted_mode != nominal_mode)
4413 push_to_sequence (conversion_insns);
4414 emit_move_insn (validize_mem (stack_parm),
4415 validize_mem (entry_parm));
4416 conversion_insns = get_insns ();
4417 end_sequence ();
4419 else
4420 emit_move_insn (validize_mem (stack_parm),
4421 validize_mem (entry_parm));
4423 if (flag_check_memory_usage)
4425 push_to_sequence (conversion_insns);
4426 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4427 XEXP (stack_parm, 0), ptr_mode,
4428 GEN_INT (GET_MODE_SIZE (GET_MODE
4429 (entry_parm))),
4430 TYPE_MODE (sizetype),
4431 GEN_INT (MEMORY_USE_RW),
4432 TYPE_MODE (integer_type_node));
4434 conversion_insns = get_insns ();
4435 end_sequence ();
4437 DECL_RTL (parm) = stack_parm;
4440 /* If this "parameter" was the place where we are receiving the
4441 function's incoming structure pointer, set up the result. */
4442 if (parm == function_result_decl)
4444 tree result = DECL_RESULT (fndecl);
4445 tree restype = TREE_TYPE (result);
4447 DECL_RTL (result)
4448 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4450 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4453 if (TREE_THIS_VOLATILE (parm))
4454 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4455 if (TREE_READONLY (parm))
4456 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4459 /* Output all parameter conversion instructions (possibly including calls)
4460 now that all parameters have been copied out of hard registers. */
4461 emit_insns (conversion_insns);
4463 last_parm_insn = get_last_insn ();
4465 current_function_args_size = stack_args_size.constant;
4467 /* Adjust function incoming argument size for alignment and
4468 minimum length. */
4470 #ifdef REG_PARM_STACK_SPACE
4471 #ifndef MAYBE_REG_PARM_STACK_SPACE
4472 current_function_args_size = MAX (current_function_args_size,
4473 REG_PARM_STACK_SPACE (fndecl));
4474 #endif
4475 #endif
4477 #ifdef STACK_BOUNDARY
4478 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4480 current_function_args_size
4481 = ((current_function_args_size + STACK_BYTES - 1)
4482 / STACK_BYTES) * STACK_BYTES;
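/* Worked example (illustrative): with STACK_BOUNDARY == 64,
   STACK_BYTES is 8 and an args size of 20 rounds up to
   ((20 + 7) / 8) * 8 == 24.  */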
4483 #endif
4485 #ifdef ARGS_GROW_DOWNWARD
4486 current_function_arg_offset_rtx
4487 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4488 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4489 size_int (-stack_args_size.constant)),
4490 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4491 #else
4492 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4493 #endif
4495 /* See how many bytes, if any, of its args a function should try to pop
4496 on return. */
4498 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4499 current_function_args_size);
4501 /* For a stdarg.h function, save info about
4502 regs and stack space used by the named args. */
4504 if (!hide_last_arg)
4505 current_function_args_info = args_so_far;
4507 /* Set the rtx used for the function return value. Put this in its
4508 own variable so any optimizers that need this information don't have
4509 to include tree.h. Do this here so it gets done when an inlined
4510 function gets output. */
4512 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4515 /* Indicate whether REGNO is an incoming argument to the current function
4516 that was promoted to a wider mode. If so, return the RTX for the
4517 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4518 that REGNO is promoted from and whether the promotion was signed or
4519 unsigned. */
4521 #ifdef PROMOTE_FUNCTION_ARGS
4523 rtx
4524 promoted_input_arg (regno, pmode, punsignedp)
4525 int regno;
4526 enum machine_mode *pmode;
4527 int *punsignedp;
4529 tree arg;
4531 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4532 arg = TREE_CHAIN (arg))
4533 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4534 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4535 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4537 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4538 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4540 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4541 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4542 && mode != DECL_MODE (arg))
4544 *pmode = DECL_MODE (arg);
4545 *punsignedp = unsignedp;
4546 return DECL_INCOMING_RTL (arg);
4550 return 0;
4553 #endif
4555 /* Compute the size and offset from the start of the stacked arguments for a
4556 parm passed in mode PASSED_MODE and with type TYPE.
4558 INITIAL_OFFSET_PTR points to the current offset into the stacked
4559 arguments.
4561 The starting offset and size for this parm are returned in *OFFSET_PTR
4562 and *ARG_SIZE_PTR, respectively.
4564 IN_REGS is non-zero if the argument will be passed in registers. It will
4565 never be set if REG_PARM_STACK_SPACE is not defined.
4567 FNDECL is the function in which the argument was defined.
4569 There are two types of rounding that are done. The first, controlled by
4570 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4571 list to be aligned to the specific boundary (in bits). This rounding
4572 affects the initial and starting offsets, but not the argument size.
4574 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4575 optionally rounds the size of the parm to PARM_BOUNDARY. The
4576 initial offset is not affected by this rounding, while the size always
4577 is and the starting offset may be. */
4579 /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
4580 initial_offset_ptr is positive because locate_and_pad_parm's
4581 callers pass in the total size of args so far as
4582 initial_offset_ptr. arg_size_ptr is always positive. */
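/* Worked example (illustrative): suppose FUNCTION_ARG_BOUNDARY
   returns 64 for a DImode parm while PARM_BOUNDARY is 32.  An
   initial offset of 4 bytes is first rounded up to 8 (the first
   kind of rounding; it moves the starting offset but not the size).
   A 6-byte BLKmode parm is then rounded up to 8 bytes (the second
   kind; it grows the size and thus the next parm's start).  */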
4584 void
4585 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4586 initial_offset_ptr, offset_ptr, arg_size_ptr)
4587 enum machine_mode passed_mode;
4588 tree type;
4589 int in_regs;
4590 tree fndecl;
4591 struct args_size *initial_offset_ptr;
4592 struct args_size *offset_ptr;
4593 struct args_size *arg_size_ptr;
4595 tree sizetree
4596 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4597 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4598 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4600 #ifdef REG_PARM_STACK_SPACE
4601 /* If we have found a stack parm before we reach the end of the
4602 area reserved for registers, skip that area. */
4603 if (! in_regs)
4605 int reg_parm_stack_space = 0;
4607 #ifdef MAYBE_REG_PARM_STACK_SPACE
4608 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4609 #else
4610 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4611 #endif
4612 if (reg_parm_stack_space > 0)
4614 if (initial_offset_ptr->var)
4616 initial_offset_ptr->var
4617 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4618 size_int (reg_parm_stack_space));
4619 initial_offset_ptr->constant = 0;
4621 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4622 initial_offset_ptr->constant = reg_parm_stack_space;
4625 #endif /* REG_PARM_STACK_SPACE */
4627 arg_size_ptr->var = 0;
4628 arg_size_ptr->constant = 0;
4630 #ifdef ARGS_GROW_DOWNWARD
4631 if (initial_offset_ptr->var)
4633 offset_ptr->constant = 0;
4634 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4635 initial_offset_ptr->var);
4637 else
4639 offset_ptr->constant = - initial_offset_ptr->constant;
4640 offset_ptr->var = 0;
4642 if (where_pad != none
4643 && (TREE_CODE (sizetree) != INTEGER_CST
4644 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4645 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4646 SUB_PARM_SIZE (*offset_ptr, sizetree);
4647 if (where_pad != downward)
4648 pad_to_arg_alignment (offset_ptr, boundary);
4649 if (initial_offset_ptr->var)
4651 arg_size_ptr->var = size_binop (MINUS_EXPR,
4652 size_binop (MINUS_EXPR,
4653 integer_zero_node,
4654 initial_offset_ptr->var),
4655 offset_ptr->var);
4657 else
4659 arg_size_ptr->constant = (- initial_offset_ptr->constant
4660 - offset_ptr->constant);
4662 #else /* !ARGS_GROW_DOWNWARD */
4663 pad_to_arg_alignment (initial_offset_ptr, boundary);
4664 *offset_ptr = *initial_offset_ptr;
4666 #ifdef PUSH_ROUNDING
4667 if (passed_mode != BLKmode)
4668 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4669 #endif
4671 /* pad_below needs the pre-rounded size to know how much to pad
4672 below, so this must be done before rounding up. */
4673 if (where_pad == downward
4674 /* However, BLKmode args passed in regs have their padding done elsewhere.
4675 The stack slot must be able to hold the entire register. */
4676 && !(in_regs && passed_mode == BLKmode))
4677 pad_below (offset_ptr, passed_mode, sizetree);
4679 if (where_pad != none
4680 && (TREE_CODE (sizetree) != INTEGER_CST
4681 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4682 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4684 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4685 #endif /* ARGS_GROW_DOWNWARD */
4688 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4689 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4691 static void
4692 pad_to_arg_alignment (offset_ptr, boundary)
4693 struct args_size *offset_ptr;
4694 int boundary;
4696 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4698 if (boundary > BITS_PER_UNIT)
4700 if (offset_ptr->var)
4702 offset_ptr->var =
4703 #ifdef ARGS_GROW_DOWNWARD
4704 round_down
4705 #else
4706 round_up
4707 #endif
4708 (ARGS_SIZE_TREE (*offset_ptr),
4709 boundary_in_bytes);
4710 offset_ptr->constant = 0; /*?*/
4712 else
4713 offset_ptr->constant =
4714 #ifdef ARGS_GROW_DOWNWARD
4715 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4716 #else
4717 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4718 #endif
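/* Worked example (illustrative): CEIL_ROUND (13, 8) == 16, while
   FLOOR_ROUND (-13, 8) == -16, so in both stack-growth directions
   the offset is pushed away from zero onto the boundary.  */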
4722 #ifndef ARGS_GROW_DOWNWARD
4723 static void
4724 pad_below (offset_ptr, passed_mode, sizetree)
4725 struct args_size *offset_ptr;
4726 enum machine_mode passed_mode;
4727 tree sizetree;
4729 if (passed_mode != BLKmode)
4731 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4732 offset_ptr->constant
4733 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4734 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4735 - GET_MODE_SIZE (passed_mode));
4737 else
4739 if (TREE_CODE (sizetree) != INTEGER_CST
4740 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4742 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4743 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4744 /* Add it in. */
4745 ADD_PARM_SIZE (*offset_ptr, s2);
4746 SUB_PARM_SIZE (*offset_ptr, sizetree);
4750 #endif
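/* Worked example (illustrative): with PARM_BOUNDARY == 32 and
   BITS_PER_UNIT == 8, an HImode parm (2 bytes) makes pad_below add
   (32 / 8) - 2 == 2 to the offset, placing the value in the
   high-address half of its 4-byte slot.  */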
4752 #ifdef ARGS_GROW_DOWNWARD
4753 static tree
4754 round_down (value, divisor)
4755 tree value;
4756 int divisor;
4758 return size_binop (MULT_EXPR,
4759 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4760 size_int (divisor));
4762 #endif
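/* Example (illustrative): round_down (size_int (13), 8) builds the
   tree (13 / 8) * 8 == 8; FLOOR_DIV_EXPR guarantees the result
   never exceeds VALUE, which is what a downward-growing args area
   needs.  */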
4764 /* Walk the tree of blocks describing the binding levels within a function
4765 and warn about uninitialized variables.
4766 This is done after calling flow_analysis and before global_alloc
4767 clobbers the pseudo-regs to hard regs. */
4769 void
4770 uninitialized_vars_warning (block)
4771 tree block;
4773 register tree decl, sub;
4774 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4776 if (TREE_CODE (decl) == VAR_DECL
4777 /* These warnings are unreliable for aggregates
4778 because assigning the fields one by one can fail to convince
4779 flow.c that the entire aggregate was initialized.
4780 Unions are troublesome because members may be shorter. */
4781 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4782 && DECL_RTL (decl) != 0
4783 && GET_CODE (DECL_RTL (decl)) == REG
4784 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4785 warning_with_decl (decl,
4786 "`%s' might be used uninitialized in this function");
4787 if (TREE_CODE (decl) == VAR_DECL
4788 && DECL_RTL (decl) != 0
4789 && GET_CODE (DECL_RTL (decl)) == REG
4790 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4791 warning_with_decl (decl,
4792 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4794 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4795 uninitialized_vars_warning (sub);
4798 /* Do the appropriate part of uninitialized_vars_warning
4799 but for arguments instead of local variables. */
4801 void
4802 setjmp_args_warning ()
4804 register tree decl;
4805 for (decl = DECL_ARGUMENTS (current_function_decl);
4806 decl; decl = TREE_CHAIN (decl))
4807 if (DECL_RTL (decl) != 0
4808 && GET_CODE (DECL_RTL (decl)) == REG
4809 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4810 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4813 /* If this function calls setjmp, put all vars into the stack
4814 unless they were declared `register'. */
4816 void
4817 setjmp_protect (block)
4818 tree block;
4820 register tree decl, sub;
4821 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4822 if ((TREE_CODE (decl) == VAR_DECL
4823 || TREE_CODE (decl) == PARM_DECL)
4824 && DECL_RTL (decl) != 0
4825 && (GET_CODE (DECL_RTL (decl)) == REG
4826 || (GET_CODE (DECL_RTL (decl)) == MEM
4827 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4828 /* If this variable came from an inline function, it must be
4829 that its life doesn't overlap the setjmp. If there was a
4830 setjmp in the function, it would already be in memory. We
4831 must exclude such variables because their DECL_RTL might be
4832 set to strange things such as virtual_stack_vars_rtx. */
4833 && ! DECL_FROM_INLINE (decl)
4834 && (
4835 #ifdef NON_SAVING_SETJMP
4836 /* If longjmp doesn't restore the registers,
4837 don't put anything in them. */
4838 NON_SAVING_SETJMP
4839 ||
4840 #endif
4841 ! DECL_REGISTER (decl)))
4842 put_var_into_stack (decl);
4843 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4844 setjmp_protect (sub);
4847 /* Like the previous function, but for args instead of local variables. */
4849 void
4850 setjmp_protect_args ()
4852 register tree decl;
4853 for (decl = DECL_ARGUMENTS (current_function_decl);
4854 decl; decl = TREE_CHAIN (decl))
4855 if ((TREE_CODE (decl) == VAR_DECL
4856 || TREE_CODE (decl) == PARM_DECL)
4857 && DECL_RTL (decl) != 0
4858 && (GET_CODE (DECL_RTL (decl)) == REG
4859 || (GET_CODE (DECL_RTL (decl)) == MEM
4860 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4861 && (
4862 /* If longjmp doesn't restore the registers,
4863 don't put anything in them. */
4864 #ifdef NON_SAVING_SETJMP
4865 NON_SAVING_SETJMP
4866 ||
4867 #endif
4868 ! DECL_REGISTER (decl)))
4869 put_var_into_stack (decl);
4872 /* Return the context-pointer register corresponding to DECL,
4873 or 0 if it does not need one. */
4875 rtx
4876 lookup_static_chain (decl)
4877 tree decl;
4879 tree context = decl_function_context (decl);
4880 tree link;
4882 if (context == 0
4883 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4884 return 0;
4886 /* We treat inline_function_decl as an alias for the current function
4887 because that is the inline function whose vars, types, etc.
4888 are being merged into the current function.
4889 See expand_inline_function. */
4890 if (context == current_function_decl || context == inline_function_decl)
4891 return virtual_stack_vars_rtx;
4893 for (link = context_display; link; link = TREE_CHAIN (link))
4894 if (TREE_PURPOSE (link) == context)
4895 return RTL_EXPR_RTL (TREE_VALUE (link));
4897 abort ();
4900 /* Convert a stack slot address ADDR for variable VAR
4901 (from a containing function)
4902 into an address valid in this function (using a static chain). */
4904 rtx
4905 fix_lexical_addr (addr, var)
4906 rtx addr;
4907 tree var;
4909 rtx basereg;
4910 HOST_WIDE_INT displacement;
4911 tree context = decl_function_context (var);
4912 struct function *fp;
4913 rtx base = 0;
4915 /* If this is the present function, we need not do anything. */
4916 if (context == current_function_decl || context == inline_function_decl)
4917 return addr;
4919 for (fp = outer_function_chain; fp; fp = fp->next)
4920 if (fp->decl == context)
4921 break;
4923 if (fp == 0)
4924 abort ();
4926 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
4927 addr = XEXP (XEXP (addr, 0), 0);
4929 /* Decode given address as base reg plus displacement. */
4930 if (GET_CODE (addr) == REG)
4931 basereg = addr, displacement = 0;
4932 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4933 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4934 else
4935 abort ();
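/* Example (illustrative): an address (plus (reg fp) (const_int 8))
   decodes to basereg == fp, displacement == 8; a bare (reg ap)
   decodes to basereg == ap, displacement == 0.  */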
4937 /* We accept vars reached via the containing function's
4938 incoming arg pointer and via its stack variables pointer. */
4939 if (basereg == fp->internal_arg_pointer)
4941 /* If reached via arg pointer, get the arg pointer value
4942 out of that function's stack frame.
4944 There are two cases: If a separate ap is needed, allocate a
4945 slot in the outer function for it and dereference it that way.
4946 This is correct even if the real ap is actually a pseudo.
4947 Otherwise, just adjust the offset from the frame pointer to
4948 compensate. */
4950 #ifdef NEED_SEPARATE_AP
4951 rtx addr;
4953 if (fp->arg_pointer_save_area == 0)
4954 fp->arg_pointer_save_area
4955 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4957 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4958 addr = memory_address (Pmode, addr);
4960 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
4961 #else
4962 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4963 base = lookup_static_chain (var);
4964 #endif
4967 else if (basereg == virtual_stack_vars_rtx)
4969 /* This is the same code as lookup_static_chain, duplicated here to
4970 avoid an extra call to decl_function_context. */
4971 tree link;
4973 for (link = context_display; link; link = TREE_CHAIN (link))
4974 if (TREE_PURPOSE (link) == context)
4976 base = RTL_EXPR_RTL (TREE_VALUE (link));
4977 break;
4981 if (base == 0)
4982 abort ();
4984 /* Use same offset, relative to appropriate static chain or argument
4985 pointer. */
4986 return plus_constant (base, displacement);
4989 /* Return the address of the trampoline for entering nested fn FUNCTION.
4990 If necessary, allocate a trampoline (in the stack frame)
4991 and emit rtl to initialize its contents (at entry to this function). */
4993 rtx
4994 trampoline_address (function)
4995 tree function;
4997 tree link;
4998 tree rtlexp;
4999 rtx tramp;
5000 struct function *fp;
5001 tree fn_context;
5003 /* Find an existing trampoline and return it. */
5004 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5005 if (TREE_PURPOSE (link) == function)
5006 return
5007 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5009 for (fp = outer_function_chain; fp; fp = fp->next)
5010 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5011 if (TREE_PURPOSE (link) == function)
5013 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5014 function);
5015 return round_trampoline_addr (tramp);
5018 /* None exists; we must make one. */
5020 /* Find the `struct function' for the function containing FUNCTION. */
5021 fp = 0;
5022 fn_context = decl_function_context (function);
5023 if (fn_context != current_function_decl
5024 && fn_context != inline_function_decl)
5025 for (fp = outer_function_chain; fp; fp = fp->next)
5026 if (fp->decl == fn_context)
5027 break;
5029 /* Allocate run-time space for this trampoline
5030 (usually in the defining function's stack frame). */
5031 #ifdef ALLOCATE_TRAMPOLINE
5032 tramp = ALLOCATE_TRAMPOLINE (fp);
5033 #else
5034 /* If rounding needed, allocate extra space
5035 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5036 #ifdef TRAMPOLINE_ALIGNMENT
5037 #define TRAMPOLINE_REAL_SIZE \
5038 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5039 #else
5040 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5041 #endif
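/* Worked example (illustrative): with TRAMPOLINE_SIZE == 10 and
   TRAMPOLINE_ALIGNMENT == 32, TRAMPOLINE_REAL_SIZE is
   10 + 4 - 1 == 13 bytes, enough to hold an aligned 10-byte
   trampoline wherever the slot happens to start.  */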
5042 if (fp != 0)
5043 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5044 else
5045 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5046 #endif
5048 /* Record the trampoline for reuse and note it for later initialization
5049 by expand_function_end. */
5050 if (fp != 0)
5052 push_obstacks (fp->function_maybepermanent_obstack,
5053 fp->function_maybepermanent_obstack);
5054 rtlexp = make_node (RTL_EXPR);
5055 RTL_EXPR_RTL (rtlexp) = tramp;
5056 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5057 pop_obstacks ();
5059 else
5061 /* Make the RTL_EXPR node temporary, not momentary, so that the
5062 trampoline_list doesn't become garbage. */
5063 int momentary = suspend_momentary ();
5064 rtlexp = make_node (RTL_EXPR);
5065 resume_momentary (momentary);
5067 RTL_EXPR_RTL (rtlexp) = tramp;
5068 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5071 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5072 return round_trampoline_addr (tramp);
5075 /* Given a trampoline address,
5076 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5078 static rtx
5079 round_trampoline_addr (tramp)
5080 rtx tramp;
5082 #ifdef TRAMPOLINE_ALIGNMENT
5083 /* Round address up to desired boundary. */
5084 rtx temp = gen_reg_rtx (Pmode);
5085 temp = expand_binop (Pmode, add_optab, tramp,
5086 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5087 temp, 0, OPTAB_LIB_WIDEN);
5088 tramp = expand_binop (Pmode, and_optab, temp,
5089 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5090 temp, 0, OPTAB_LIB_WIDEN);
5091 #endif
5092 return tramp;
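/* Worked example (illustrative): with TRAMPOLINE_ALIGNMENT == 64
   (8 bytes), an address of 0x1003 is rounded by the code above to
   (0x1003 + 7) & -8 == 0x1008.  */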
5095 /* The functions identify_blocks and reorder_blocks provide a way to
5096 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5097 duplicate portions of the RTL code. Call identify_blocks before
5098 changing the RTL, and call reorder_blocks after. */
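/* Illustrative sketch, not compiled in: the calling protocol these
   two functions expect.  `reshuffle_insns' is a made-up placeholder
   for any pass that reorders or duplicates rtl.  */
#if 0
static void
example_reorder_protocol ()
{
  tree top = DECL_INITIAL (current_function_decl);
  tree *vec = identify_blocks (top, get_insns ());

  reshuffle_insns ();		/* ...change the insn chain here...  */

  DECL_INITIAL (current_function_decl)
    = reorder_blocks (vec, top, get_insns ());
  free ((char *) vec);
}
#endif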
5100 /* Put all this function's BLOCK nodes including those that are chained
5101 onto the first block into a vector, and return it.
5102 Also store in each NOTE for the beginning or end of a block
5103 the index of that block in the vector.
5104 The arguments are BLOCK, the chain of top-level blocks of the function,
5105 and INSNS, the insn chain of the function. */
5107 tree *
5108 identify_blocks (block, insns)
5109 tree block;
5110 rtx insns;
5112 int n_blocks;
5113 tree *block_vector;
5114 int *block_stack;
5115 int depth = 0;
5116 int next_block_number = 1;
5117 int current_block_number = 1;
5118 rtx insn;
5120 if (block == 0)
5121 return 0;
5123 n_blocks = all_blocks (block, 0);
5124 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5125 block_stack = (int *) alloca (n_blocks * sizeof (int));
5127 all_blocks (block, block_vector);
5129 for (insn = insns; insn; insn = NEXT_INSN (insn))
5130 if (GET_CODE (insn) == NOTE)
5132 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5134 block_stack[depth++] = current_block_number;
5135 current_block_number = next_block_number;
5136 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5138 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5140 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5141 current_block_number = block_stack[--depth];
5145 if (n_blocks != next_block_number)
5146 abort ();
5148 return block_vector;
5151 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5152 and a revised instruction chain, rebuild the tree structure
5153 of BLOCK nodes to correspond to the new order of RTL.
5154 The new block tree is inserted below BLOCK.
5155 Returns the current top-level block. */
5157 tree
5158 reorder_blocks (block_vector, block, insns)
5159 tree *block_vector;
5160 tree block;
5161 rtx insns;
5163 tree current_block = block;
5164 rtx insn;
5166 if (block_vector == 0)
5167 return block;
5169 /* Prune the old trees away, so that they don't get in the way. */
5170 BLOCK_SUBBLOCKS (current_block) = 0;
5171 BLOCK_CHAIN (current_block) = 0;
5173 for (insn = insns; insn; insn = NEXT_INSN (insn))
5174 if (GET_CODE (insn) == NOTE)
5176 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5178 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5179 /* If we have seen this block before, copy it. */
5180 if (TREE_ASM_WRITTEN (block))
5181 block = copy_node (block);
5182 BLOCK_SUBBLOCKS (block) = 0;
5183 TREE_ASM_WRITTEN (block) = 1;
5184 BLOCK_SUPERCONTEXT (block) = current_block;
5185 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5186 BLOCK_SUBBLOCKS (current_block) = block;
5187 current_block = block;
5188 NOTE_SOURCE_FILE (insn) = 0;
5190 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5192 BLOCK_SUBBLOCKS (current_block)
5193 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5194 current_block = BLOCK_SUPERCONTEXT (current_block);
5195 NOTE_SOURCE_FILE (insn) = 0;
5199 BLOCK_SUBBLOCKS (current_block)
5200 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5201 return current_block;
5204 /* Reverse the order of elements in the chain T of blocks,
5205 and return the new head of the chain (old last element). */
5207 static tree
5208 blocks_nreverse (t)
5209 tree t;
5211 register tree prev = 0, decl, next;
5212 for (decl = t; decl; decl = next)
5214 next = BLOCK_CHAIN (decl);
5215 BLOCK_CHAIN (decl) = prev;
5216 prev = decl;
5218 return prev;
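/* Example (illustrative): given the chain B1 -> B2 -> B3, the loop
   above rewrites each BLOCK_CHAIN in place and returns
   B3 -> B2 -> B1.  */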
5221 /* Count the subblocks of the list starting with BLOCK, and list them
5222 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5223 blocks. */
5225 static int
5226 all_blocks (block, vector)
5227 tree block;
5228 tree *vector;
5230 int n_blocks = 0;
5232 while (block)
5234 TREE_ASM_WRITTEN (block) = 0;
5236 /* Record this block. */
5237 if (vector)
5238 vector[n_blocks] = block;
5240 ++n_blocks;
5242 /* Record the subblocks, and their subblocks... */
5243 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5244 vector ? vector + n_blocks : 0);
5245 block = BLOCK_CHAIN (block);
5248 return n_blocks;
5251 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5252 and initialize static variables for generating RTL for the statements
5253 of the function. */
5255 void
5256 init_function_start (subr, filename, line)
5257 tree subr;
5258 char *filename;
5259 int line;
5261 init_stmt_for_function ();
5263 cse_not_expected = ! optimize;
5265 /* Caller save not needed yet. */
5266 caller_save_needed = 0;
5268 /* No stack slots have been made yet. */
5269 stack_slot_list = 0;
5271 /* There is no stack slot for handling nonlocal gotos. */
5272 nonlocal_goto_handler_slot = 0;
5273 nonlocal_goto_stack_level = 0;
5275 /* No labels have been declared for nonlocal use. */
5276 nonlocal_labels = 0;
5278 /* No function calls so far in this function. */
5279 function_call_count = 0;
5281 /* No parm regs have been allocated.
5282 (This is important for output_inline_function.) */
5283 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5285 /* Initialize the RTL mechanism. */
5286 init_emit ();
5288 /* Initialize the queue of pending postincrements and postdecrements,
5289 and some other info in expr.c. */
5290 init_expr ();
5292 /* We haven't done register allocation yet. */
5293 reg_renumber = 0;
5295 init_const_rtx_hash_table ();
5297 current_function_name = (*decl_printable_name) (subr, 2);
5299 /* Nonzero if this is a nested function that uses a static chain. */
5301 current_function_needs_context
5302 = (decl_function_context (current_function_decl) != 0
5303 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5305 /* Set if a call to setjmp is seen. */
5306 current_function_calls_setjmp = 0;
5308 /* Set if a call to longjmp is seen. */
5309 current_function_calls_longjmp = 0;
5311 current_function_calls_alloca = 0;
5312 current_function_has_nonlocal_label = 0;
5313 current_function_has_nonlocal_goto = 0;
5314 current_function_contains_functions = 0;
5315 current_function_is_thunk = 0;
5317 current_function_returns_pcc_struct = 0;
5318 current_function_returns_struct = 0;
5319 current_function_epilogue_delay_list = 0;
5320 current_function_uses_const_pool = 0;
5321 current_function_uses_pic_offset_table = 0;
5323 /* We have not yet needed to make a label to jump to for tail-recursion. */
5324 tail_recursion_label = 0;
5326 /* We haven't had a need to make a save area for ap yet. */
5328 arg_pointer_save_area = 0;
5330 /* No stack slots allocated yet. */
5331 frame_offset = 0;
5333 /* No SAVE_EXPRs in this function yet. */
5334 save_expr_regs = 0;
5336 /* No RTL_EXPRs in this function yet. */
5337 rtl_expr_chain = 0;
5339 /* Set up to allocate temporaries. */
5340 init_temp_slots ();
5342 /* Within the function body, compute a type's size as soon as it is laid out. */
5343 immediate_size_expand++;
5345 /* We haven't made any trampolines for this function yet. */
5346 trampoline_list = 0;
5348 init_pending_stack_adjust ();
5349 inhibit_defer_pop = 0;
5351 current_function_outgoing_args_size = 0;
5353 /* Prevent ever trying to delete the first instruction of a function.
5354 Also tell final how to output a linenum before the function prologue.
5355 Note linenums could be missing, e.g. when compiling a Java .class file. */
5356 if (line > 0)
5357 emit_line_note (filename, line);
5359 /* Make sure first insn is a note even if we don't want linenums.
5360 This makes sure the first insn will never be deleted.
5361 Also, final expects a note to appear there. */
5362 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5364 /* Set flags used by final.c. */
5365 if (aggregate_value_p (DECL_RESULT (subr)))
5367 #ifdef PCC_STATIC_STRUCT_RETURN
5368 current_function_returns_pcc_struct = 1;
5369 #endif
5370 current_function_returns_struct = 1;
5373 /* Warn if this value is an aggregate type,
5374 regardless of which calling convention we are using for it. */
5375 if (warn_aggregate_return
5376 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5377 warning ("function returns an aggregate");
5379 current_function_returns_pointer
5380 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5382 /* Indicate that we need to distinguish between the return value of the
5383 present function and the return value of a function being called. */
5384 rtx_equal_function_value_matters = 1;
5386 /* Indicate that we have not instantiated virtual registers yet. */
5387 virtuals_instantiated = 0;
5389 /* Indicate we have no need of a frame pointer yet. */
5390 frame_pointer_needed = 0;
5392 /* By default assume not varargs or stdarg. */
5393 current_function_varargs = 0;
5394 current_function_stdarg = 0;
5397 /* Indicate that the current function uses extra args
5398 not explicitly mentioned in the argument list in any fashion. */
5400 void
5401 mark_varargs ()
5403 current_function_varargs = 1;
5406 /* Expand a call to __main at the beginning of a possible main function. */
5408 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5409 #undef HAS_INIT_SECTION
5410 #define HAS_INIT_SECTION
5411 #endif
5413 void
5414 expand_main_function ()
5416 #if !defined (HAS_INIT_SECTION)
5417 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5418 VOIDmode, 0);
5419 #endif /* not HAS_INIT_SECTION */
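/* Editorial aside, not part of the original source: on targets with
   no .init section, the library call emitted above makes every
   `main' begin by calling `__main', the libgcc entry point that runs
   static constructors.  The effect is roughly as if the user had
   written the disabled sketch below (`__main's declaration here is
   an assumption; see libgcc2.c for the real one).  */
#if 0
extern void __main (void);

int
main (int argc, char **argv)
{
  __main ();   /* run global constructors before user code */
  /* ... user code ... */
  return 0;
}
#endif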
5422 extern struct obstack permanent_obstack;
5424 /* Start the RTL for a new function, and set variables used for
5425 emitting RTL.
5426 SUBR is the FUNCTION_DECL node.
5427 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5428 the function's parameters, which must be run at any return statement. */
5430 void
5431 expand_function_start (subr, parms_have_cleanups)
5432 tree subr;
5433 int parms_have_cleanups;
5435 register int i;
5436 tree tem;
5437 rtx last_ptr = NULL_RTX;
5439 /* Make sure volatile mem refs aren't considered
5440 valid operands of arithmetic insns. */
5441 init_recog_no_volatile ();
5443 /* If function gets a static chain arg, store it in the stack frame.
5444 Do this first, so it gets the first stack slot offset. */
5445 if (current_function_needs_context)
5447 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5449 /* Delay copying static chain if it is not a register to avoid
5450 conflicts with regs used for parameters. */
5451 if (! SMALL_REGISTER_CLASSES
5452 || GET_CODE (static_chain_incoming_rtx) == REG)
5453 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5456 /* If the parameters of this function need cleaning up, get a label
5457 for the beginning of the code which executes those cleanups. This must
5458 be done before doing anything with return_label. */
5459 if (parms_have_cleanups)
5460 cleanup_label = gen_label_rtx ();
5461 else
5462 cleanup_label = 0;
5464 /* Make the label for return statements to jump to, if this machine
5465 does not have a one-instruction return and uses an epilogue,
5466 or if it returns a structure, or if it has parm cleanups. */
5467 #ifdef HAVE_return
5468 if (cleanup_label == 0 && HAVE_return
5469 && ! current_function_returns_pcc_struct
5470 && ! (current_function_returns_struct && ! optimize))
5471 return_label = 0;
5472 else
5473 return_label = gen_label_rtx ();
5474 #else
5475 return_label = gen_label_rtx ();
5476 #endif
5478 /* Initialize rtx used to return the value. */
5479 /* Do this before assign_parms so that we copy the struct value address
5480 before any library calls that assign parms might generate. */
5482 /* Decide whether to return the value in memory or in a register. */
5483 if (aggregate_value_p (DECL_RESULT (subr)))
5485 /* Returning something that won't go in a register. */
5486 register rtx value_address = 0;
5488 #ifdef PCC_STATIC_STRUCT_RETURN
5489 if (current_function_returns_pcc_struct)
5491 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5492 value_address = assemble_static_space (size);
5494 else
5495 #endif
5497 /* Expect to be passed the address of a place to store the value.
5498 If it is passed as an argument, assign_parms will take care of
5499 it. */
5500 if (struct_value_incoming_rtx)
5502 value_address = gen_reg_rtx (Pmode);
5503 emit_move_insn (value_address, struct_value_incoming_rtx);
5506 if (value_address)
5508 DECL_RTL (DECL_RESULT (subr))
5509 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5510 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5511 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5514 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5515 /* If return mode is void, this decl rtl should not be used. */
5516 DECL_RTL (DECL_RESULT (subr)) = 0;
5517 else if (parms_have_cleanups)
5519 /* If function will end with cleanup code for parms,
5520 compute the return values into a pseudo reg,
5521 which we will copy into the true return register
5522 after the cleanups are done. */
5524 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5526 #ifdef PROMOTE_FUNCTION_RETURN
5527 tree type = TREE_TYPE (DECL_RESULT (subr));
5528 int unsignedp = TREE_UNSIGNED (type);
5530 mode = promote_mode (type, mode, &unsignedp, 1);
5531 #endif
5533 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5535 else
5536 /* Scalar, returned in a register. */
5538 #ifdef FUNCTION_OUTGOING_VALUE
5539 DECL_RTL (DECL_RESULT (subr))
5540 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5541 #else
5542 DECL_RTL (DECL_RESULT (subr))
5543 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5544 #endif
5546 /* Mark this reg as the function's return value. */
5547 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5549 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5550 /* Needed because we may need to move this to memory
5551 in case it's a named return value whose address is taken. */
5552 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5556 /* Initialize rtx for parameters and local variables.
5557 In some cases this requires emitting insns. */
5559 assign_parms (subr, 0);
5561 /* Copy the static chain now if it wasn't a register. The delay is to
5562 avoid conflicts with the parameter passing registers. */
5564 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5565 if (GET_CODE (static_chain_incoming_rtx) != REG)
5566 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5568 /* The following was moved from init_function_start.
5569 The move is supposed to make sdb output more accurate. */
5570 /* Indicate the beginning of the function body,
5571 as opposed to parm setup. */
5572 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5574 /* If doing stupid allocation, mark parms as born here. */
5576 if (GET_CODE (get_last_insn ()) != NOTE)
5577 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5578 parm_birth_insn = get_last_insn ();
5580 if (obey_regdecls)
5582 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5583 use_variable (regno_reg_rtx[i]);
5585 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5586 use_variable (current_function_internal_arg_pointer);
5589 context_display = 0;
5590 if (current_function_needs_context)
5592 /* Fetch static chain values for containing functions. */
5593 tem = decl_function_context (current_function_decl);
5594 /* If not doing stupid register allocation, copy the static chain
5595 pointer into a pseudo. If we have small register classes, copy
5596 the value from memory if static_chain_incoming_rtx is a REG. If
5597 we do stupid register allocation, we use the stack address
5598 generated above. */
5599 if (tem && ! obey_regdecls)
5601 /* If the static chain originally came in a register, put it back
5602 there, then move it out in the next insn. The reason for
5603 this peculiar code is to satisfy function integration. */
5604 if (SMALL_REGISTER_CLASSES
5605 && GET_CODE (static_chain_incoming_rtx) == REG)
5606 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5607 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5610 while (tem)
5612 tree rtlexp = make_node (RTL_EXPR);
5614 RTL_EXPR_RTL (rtlexp) = last_ptr;
5615 context_display = tree_cons (tem, rtlexp, context_display);
5616 tem = decl_function_context (tem);
5617 if (tem == 0)
5618 break;
5619 /* Chain through stack frames, assuming the pointer to the next lexical frame
5620 is found at the place we always store it. */
5621 #ifdef FRAME_GROWS_DOWNWARD
5622 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5623 #endif
5624 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5625 memory_address (Pmode, last_ptr)));
5627 /* If we are not optimizing, ensure that we know that this
5628 piece of context is live over the entire function. */
5629 if (! optimize)
5630 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5631 save_expr_regs);
5635 /* After the display initializations is where the tail-recursion label
5636 should go, if we end up needing one. Ensure we have a NOTE here
5637 since some things (like trampolines) get placed before this. */
5638 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5640 /* Now evaluate the sizes of any types declared among the arguments. */
5641 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5643 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5644 EXPAND_MEMORY_USE_BAD);
5645 /* Flush the queue in case this parameter declaration has
5646 side-effects. */
5647 emit_queue ();
5650 /* Make sure there is a line number after the function entry setup code. */
5651 force_next_line_note ();
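/* Editorial aside, not part of the original source: when
   aggregate_value_p is true above, the return value lives in memory
   and its address travels in struct_value_incoming_rtx (or, PCC
   style, in static space the callee allocates).  At the source level
   this corresponds roughly to rewriting a struct-valued function
   into the hidden-pointer form sketched below; the names are
   hypothetical.  */
#if 0
struct big { int a[8]; };

struct big
make_big (int x)                /* what the user writes */
{
  struct big b = { { x } };
  return b;
}

/* Roughly how it is compiled when the value cannot go in registers:
   the caller passes the destination address as a hidden argument.  */
static void
make_big_hidden (struct big *retval, int x)
{
  struct big b = { { x } };
  *retval = b;
}
#endif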
5654 /* Generate RTL for the end of the current function.
5655 FILENAME and LINE are the current position in the source file.
5657 It is up to language-specific callers to do cleanups for parameters--
5658 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5660 void
5661 expand_function_end (filename, line, end_bindings)
5662 char *filename;
5663 int line;
5664 int end_bindings;
5666 register int i;
5667 tree link;
5669 #ifdef TRAMPOLINE_TEMPLATE
5670 static rtx initial_trampoline;
5671 #endif
5673 #ifdef NON_SAVING_SETJMP
5674 /* Don't put any variables in registers if we call setjmp
5675 on a machine that fails to restore the registers. */
5676 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5678 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5679 setjmp_protect (DECL_INITIAL (current_function_decl));
5681 setjmp_protect_args ();
5683 #endif
5685 /* Save the argument pointer if a save area was made for it. */
5686 if (arg_pointer_save_area)
5688 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5689 emit_insn_before (x, tail_recursion_reentry);
5692 /* Initialize any trampolines required by this function. */
5693 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5695 tree function = TREE_PURPOSE (link);
5696 rtx context = lookup_static_chain (function);
5697 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5698 #ifdef TRAMPOLINE_TEMPLATE
5699 rtx blktramp;
5700 #endif
5701 rtx seq;
5703 #ifdef TRAMPOLINE_TEMPLATE
5704 /* First make sure this compilation has a template for
5705 initializing trampolines. */
5706 if (initial_trampoline == 0)
5708 end_temporary_allocation ();
5709 initial_trampoline
5710 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5711 resume_temporary_allocation ();
5713 #endif
5715 /* Generate insns to initialize the trampoline. */
5716 start_sequence ();
5717 tramp = round_trampoline_addr (XEXP (tramp, 0));
5718 #ifdef TRAMPOLINE_TEMPLATE
5719 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5720 emit_block_move (blktramp, initial_trampoline,
5721 GEN_INT (TRAMPOLINE_SIZE),
5722 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5723 #endif
5724 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5725 seq = get_insns ();
5726 end_sequence ();
5728 /* Put those insns at entry to the containing function (this one). */
5729 emit_insns_before (seq, tail_recursion_reentry);
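/* Editorial aside, not part of the original source: the loop above
   materializes one trampoline per nested function whose address is
   taken.  In GNU C that situation arises from code like the disabled
   sketch below: passing `inner' to `apply' requires an executable
   stub that loads the static chain (so `inner' can reach `k') before
   jumping to `inner' itself.  */
#if 0
static int
apply (int (*f) (int), int x)
{
  return f (x);
}

static int
outer (int k)
{
  int inner (int v) { return v + k; }  /* GNU C nested function */
  /* Taking `inner's address forces a trampoline on most targets.  */
  return apply (inner, 2);
}
#endif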
5732 /* If we are doing stack checking and this function makes calls,
5733 do a stack probe at the start of the function to ensure we have enough
5734 space for another stack frame. */
5735 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5737 rtx insn, seq;
5739 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5740 if (GET_CODE (insn) == CALL_INSN)
5742 start_sequence ();
5743 probe_stack_range (STACK_CHECK_PROTECT,
5744 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5745 seq = get_insns ();
5746 end_sequence ();
5747 emit_insns_before (seq, tail_recursion_reentry);
5748 break;
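/* Editorial aside, not part of the original source: a software stack
   probe of the kind requested above via probe_stack_range just
   touches the region about to be used, one page at a time, so the OS
   can extend the stack (or fault cleanly) before any frame data is
   written.  A hypothetical C rendition, assuming 4096-byte pages and
   ignoring the direction of stack growth:  */
#if 0
#define PROBE_INTERVAL 4096

static void
probe_stack (char *region, long size)
{
  long i;
  /* The volatile store keeps each probe from being optimized away.  */
  for (i = 0; i < size; i += PROBE_INTERVAL)
    *(volatile char *) (region + i) = 0;
}
#endif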
5752 /* Warn about unused parms if extra warnings were specified. */
5753 if (warn_unused && extra_warnings)
5755 tree decl;
5757 for (decl = DECL_ARGUMENTS (current_function_decl);
5758 decl; decl = TREE_CHAIN (decl))
5759 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5760 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5761 warning_with_decl (decl, "unused parameter `%s'");
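/* Editorial aside, not part of the original source: the loop above
   fires only for parameters that are named in the source yet never
   used, as in the disabled example below.  Compiler-generated
   parameters (no DECL_NAME, or DECL_ARTIFICIAL) are deliberately
   exempt.  */
#if 0
static int
add1 (int a, int b)             /* warns: unused parameter `b' */
{
  return a + 1;
}
#endif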
5764 /* Delete handlers for nonlocal gotos if nothing uses them. */
5765 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5766 delete_handlers ();
5768 /* End any sequences that failed to be closed due to syntax errors. */
5769 while (in_sequence_p ())
5770 end_sequence ();
5772 /* Outside function body, can't compute type's actual size
5773 until next function's body starts. */
5774 immediate_size_expand--;
5776 /* If doing stupid register allocation,
5777 mark register parms as dying here. */
5779 if (obey_regdecls)
5781 rtx tem;
5782 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5783 use_variable (regno_reg_rtx[i]);
5785 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5787 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5789 use_variable (XEXP (tem, 0));
5790 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5793 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5794 use_variable (current_function_internal_arg_pointer);
5797 clear_pending_stack_adjust ();
5798 do_pending_stack_adjust ();
5800 /* Mark the end of the function body.
5801 If control reaches this insn, the function can drop through
5802 without returning a value. */
5803 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5805 /* Must mark the last line number note in the function, so that the test
5806 coverage code can avoid counting the last line twice. This just tells
5807 the code to ignore the immediately following line note, since there
5808 already exists a copy of this note somewhere above. This line number
5809 note is still needed for debugging though, so we can't delete it. */
5810 if (flag_test_coverage)
5811 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5813 /* Output a line number for the end of the function.
5814 SDB depends on this. */
5815 emit_line_note_force (filename, line);
5817 /* Output the label for the actual return from the function,
5818 if one is expected. This happens either because a function epilogue
5819 is used instead of a return instruction, or because a return was done
5820 with a goto in order to run local cleanups, or because of pcc-style
5821 structure returning. */
5823 if (return_label)
5824 emit_label (return_label);
5826 /* C++ uses this. */
5827 if (end_bindings)
5828 expand_end_bindings (0, 0, 0);
5830 /* Now handle any leftover exception regions that may have been
5831 created for the parameters. */
5833 rtx last = get_last_insn ();
5834 rtx label;
5836 expand_leftover_cleanups ();
5838 /* If the above emitted any code, make sure we jump around it. */
5839 if (last != get_last_insn ())
5841 label = gen_label_rtx ();
5842 last = emit_jump_insn_after (gen_jump (label), last);
5843 last = emit_barrier_after (last);
5844 emit_label (label);
5848 /* If we had calls to alloca, and this machine needs
5849 an accurate stack pointer to exit the function,
5850 insert some code to save and restore the stack pointer. */
5851 #ifdef EXIT_IGNORE_STACK
5852 if (! EXIT_IGNORE_STACK)
5853 #endif
5854 if (current_function_calls_alloca)
5856 rtx tem = 0;
5858 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5859 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
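/* Editorial aside, not part of the original source: the save/restore
   pair emitted above matters for functions such as the disabled
   sketch below, where alloca moves the stack pointer by a run-time
   amount; on machines whose epilogue needs an accurate stack
   pointer, the saved level must be reinstated before returning.
   (<alloca.h> is an assumption; the header varies by system.)  */
#if 0
#include <alloca.h>

static long
sum_n (int n)
{
  long *tmp = (long *) alloca (n * sizeof (long));
  long s = 0;
  int i;
  for (i = 0; i < n; i++)
    s += (tmp[i] = i);
  return s;
}
#endif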
5862 /* If scalar return value was computed in a pseudo-reg,
5863 copy that to the hard return register. */
5864 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5865 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5866 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5867 >= FIRST_PSEUDO_REGISTER))
5869 rtx real_decl_result;
5871 #ifdef FUNCTION_OUTGOING_VALUE
5872 real_decl_result
5873 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5874 current_function_decl);
5875 #else
5876 real_decl_result
5877 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5878 current_function_decl);
5879 #endif
5880 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5881 /* If this is a BLKmode structure being returned in registers, then use
5882 the mode computed in expand_return. */
5883 if (GET_MODE (real_decl_result) == BLKmode)
5884 PUT_MODE (real_decl_result,
5885 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
5886 emit_move_insn (real_decl_result,
5887 DECL_RTL (DECL_RESULT (current_function_decl)));
5888 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
5890 /* The delay slot scheduler assumes that current_function_return_rtx
5891 holds the hard register containing the return value, not a temporary
5892 pseudo. */
5893 current_function_return_rtx = real_decl_result;
5896 /* If returning a structure, arrange to return the address of the value
5897 in a place where debuggers expect to find it.
5899 If returning a structure PCC style,
5900 the caller also depends on this value.
5901 And current_function_returns_pcc_struct is not necessarily set. */
5902 if (current_function_returns_struct
5903 || current_function_returns_pcc_struct)
5905 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5906 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5907 #ifdef FUNCTION_OUTGOING_VALUE
5908 rtx outgoing
5909 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5910 current_function_decl);
5911 #else
5912 rtx outgoing
5913 = FUNCTION_VALUE (build_pointer_type (type),
5914 current_function_decl);
5915 #endif
5917 /* Mark this as a function return value so integrate will delete the
5918 assignment and USE below when inlining this function. */
5919 REG_FUNCTION_VALUE_P (outgoing) = 1;
5921 emit_move_insn (outgoing, value_address);
5922 use_variable (outgoing);
5925 /* Output a return insn if we are using one.
5926 Otherwise, let the rtl chain end here, to drop through
5927 into the epilogue. */
5929 #ifdef HAVE_return
5930 if (HAVE_return)
5932 emit_jump_insn (gen_return ());
5933 emit_barrier ();
5935 #endif
5937 /* Fix up any gotos that jumped out to the outermost
5938 binding level of the function.
5939 Must follow emitting RETURN_LABEL. */
5941 /* If you have any cleanups to do at this point,
5942 and they need to create temporary variables,
5943 then you will lose. */
5944 expand_fixups (get_insns ());
5947 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5949 static int *prologue;
5950 static int *epilogue;
5952 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5953 or a single insn). */
5955 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5956 static int *
5957 record_insns (insns)
5958 rtx insns;
5960 int *vec;
5962 if (GET_CODE (insns) == SEQUENCE)
5964 int len = XVECLEN (insns, 0);
5965 vec = (int *) oballoc ((len + 1) * sizeof (int));
5966 vec[len] = 0;
5967 while (--len >= 0)
5968 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5970 else
5972 vec = (int *) oballoc (2 * sizeof (int));
5973 vec[0] = INSN_UID (insns);
5974 vec[1] = 0;
5976 return vec;
5979 /* Determine how many of INSN's INSN_UIDs (every element, if INSN is a SEQUENCE) appear in VEC. */
5981 static int
5982 contains (insn, vec)
5983 rtx insn;
5984 int *vec;
5986 register int i, j;
5988 if (GET_CODE (insn) == INSN
5989 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5991 int count = 0;
5992 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5993 for (j = 0; vec[j]; j++)
5994 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5995 count++;
5996 return count;
5998 else
6000 for (j = 0; vec[j]; j++)
6001 if (INSN_UID (insn) == vec[j])
6002 return 1;
6004 return 0;
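/* Editorial aside, not part of the original source: record_insns and
   contains above amount to a tiny set-of-UIDs abstraction -- a
   zero-terminated int vector plus a membership count.  A
   self-contained sketch of the same idea (names hypothetical):  */
#if 0
#include <stdlib.h>

/* Return a zero-terminated copy of IDS[0..N-1]; IDs are nonzero.  */
static int *
record_ids (const int *ids, int n)
{
  int *vec = (int *) malloc ((n + 1) * sizeof (int));
  int i;
  for (i = 0; i < n; i++)
    vec[i] = ids[i];
  vec[n] = 0;
  return vec;
}

/* Count how many of IDS[0..N-1] occur in the zero-terminated VEC.  */
static int
count_members (const int *ids, int n, const int *vec)
{
  int count = 0, i, j;
  for (i = 0; i < n; i++)
    for (j = 0; vec[j]; j++)
      if (ids[i] == vec[j])
        count++;
  return count;
}
#endif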
6006 #endif /* HAVE_prologue || HAVE_epilogue */
6008 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6009 this into place with notes indicating where the prologue ends and where
6010 the epilogue begins. Update the basic block information when possible. */
6012 void
6013 thread_prologue_and_epilogue_insns (f)
6014 rtx f;
6016 #ifdef HAVE_prologue
6017 if (HAVE_prologue)
6019 rtx head, seq;
6021 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6022 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6023 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6024 seq = gen_prologue ();
6025 head = emit_insn_after (seq, f);
6027 /* Include the new prologue insns in the first block. Ignore them
6028 if they form a basic block unto themselves. */
6029 if (basic_block_head && n_basic_blocks
6030 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
6031 basic_block_head[0] = NEXT_INSN (f);
6033 /* Retain a map of the prologue insns. */
6034 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6036 else
6037 #endif
6038 prologue = 0;
6040 #ifdef HAVE_epilogue
6041 if (HAVE_epilogue)
6043 rtx insn = get_last_insn ();
6044 rtx prev = prev_nonnote_insn (insn);
6046 /* If we end with a BARRIER, we don't need an epilogue. */
6047 if (! (prev && GET_CODE (prev) == BARRIER))
6049 rtx tail, seq, tem;
6050 rtx first_use = 0;
6051 rtx last_use = 0;
6053 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6054 epilogue insns, the USE insns at the end of a function,
6055 the jump insn that returns, and then a BARRIER. */
6057 /* Move the USE insns at the end of a function onto a list. */
6058 while (prev
6059 && GET_CODE (prev) == INSN
6060 && GET_CODE (PATTERN (prev)) == USE)
6062 tem = prev;
6063 prev = prev_nonnote_insn (prev);
6065 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6066 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6067 if (first_use)
6069 NEXT_INSN (tem) = first_use;
6070 PREV_INSN (first_use) = tem;
6072 first_use = tem;
6073 if (!last_use)
6074 last_use = tem;
6077 emit_barrier_after (insn);
6079 seq = gen_epilogue ();
6080 tail = emit_jump_insn_after (seq, insn);
6082 /* Insert the USE insns immediately before the return insn, which
6083 must be the insn just before the final barrier. */
6084 if (first_use)
6086 tem = prev_nonnote_insn (get_last_insn ());
6087 NEXT_INSN (PREV_INSN (tem)) = first_use;
6088 PREV_INSN (first_use) = PREV_INSN (tem);
6089 PREV_INSN (tem) = last_use;
6090 NEXT_INSN (last_use) = tem;
6093 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6095 /* Include the new epilogue insns in the last block. Ignore
6096 them if they form a basic block unto themselves. */
6097 if (basic_block_end && n_basic_blocks
6098 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
6099 basic_block_end[n_basic_blocks - 1] = tail;
6101 /* Retain a map of the epilogue insns. */
6102 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6103 return;
6106 #endif
6107 epilogue = 0;
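/* Editorial aside, not part of the original source: the USE-moving
   code above is ordinary doubly-linked-list surgery on the insn
   chain.  The disabled sketch below performs the same unlink and
   relink steps on a hypothetical node type.  */
#if 0
struct node { struct node *prev, *next; };

/* Unlink N from its list.  Both neighbors are assumed to exist,
   as they do at the point the code above unlinks a USE insn.  */
static void
unlink_node (struct node *n)
{
  n->prev->next = n->next;
  n->next->prev = n->prev;
}

/* Reinsert N immediately before POS (POS->prev assumed nonnull).  */
static void
link_before (struct node *n, struct node *pos)
{
  n->prev = pos->prev;
  n->next = pos;
  pos->prev->next = n;
  pos->prev = n;
}
#endif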
6110 /* Reposition the prologue-end and epilogue-begin notes after instruction
6111 scheduling and delayed branch scheduling. */
6113 void
6114 reposition_prologue_and_epilogue_notes (f)
6115 rtx f;
6117 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6118 /* Reposition the prologue and epilogue notes. */
6119 if (n_basic_blocks)
6121 rtx next, prev;
6122 int len;
6124 if (prologue)
6126 register rtx insn, note = 0;
6128 /* Scan from the beginning until we reach the last prologue insn.
6129 We apparently can't depend on basic_block_{head,end} after
6130 reorg has run. */
6131 for (len = 0; prologue[len]; len++)
6133 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6135 if (GET_CODE (insn) == NOTE)
6137 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6138 note = insn;
6140 else if ((len -= contains (insn, prologue)) == 0)
6142 /* Find the prologue-end note if we haven't already, and
6143 move it to just after the last prologue insn. */
6144 if (note == 0)
6146 for (note = insn; (note = NEXT_INSN (note));)
6147 if (GET_CODE (note) == NOTE
6148 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6149 break;
6151 next = NEXT_INSN (note);
6152 prev = PREV_INSN (note);
6153 if (prev)
6154 NEXT_INSN (prev) = next;
6155 if (next)
6156 PREV_INSN (next) = prev;
6157 add_insn_after (note, insn);
6162 if (epilogue)
6164 register rtx insn, note = 0;
6166 /* Scan from the end until we reach the first epilogue insn.
6167 We apparently can't depend on basic_block_{head,end} after
6168 reorg has run. */
6169 for (len = 0; epilogue[len]; len++)
6171 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6173 if (GET_CODE (insn) == NOTE)
6175 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6176 note = insn;
6178 else if ((len -= contains (insn, epilogue)) == 0)
6180 /* Find the epilogue-begin note if we haven't already, and
6181 move it to just before the first epilogue insn. */
6182 if (note == 0)
6184 for (note = insn; (note = PREV_INSN (note));)
6185 if (GET_CODE (note) == NOTE
6186 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6187 break;
6189 next = NEXT_INSN (note);
6190 prev = PREV_INSN (note);
6191 if (prev)
6192 NEXT_INSN (prev) = next;
6193 if (next)
6194 PREV_INSN (next) = prev;
6195 add_insn_after (note, PREV_INSN (insn));
6200 #endif /* HAVE_prologue or HAVE_epilogue */