/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macro NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than the value.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
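
/* For illustration (an editorial example, not part of the original source):
   with ALIGN == 8, FLOOR_ROUND (13, 8) == 8 and FLOOR_ROUND (-9, 8) == -16,
   while CEIL_ROUND (13, 8) == 16 and CEIL_ROUND (-9, 8) == -8.  The masking
   form agrees with division-based rounding for nonnegative values but stays
   well defined for the negative frame offsets that arise when the frame
   grows downward.  */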
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */

int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */

int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */

int current_function_uses_only_leaf_regs;

/* Nonzero if the function being compiled issues a computed jump.  */

int current_function_has_computed_jump;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;
/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;
/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
int current_function_instrument_entry_exit;
/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;
/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* List (chain of EXPR_LIST) of stack slots that hold the current handlers
   for nonlocal gotos.  There is one for every nonlocal label in the function;
   this list matches the one in nonlocal_labels.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slots;

/* List (chain of EXPR_LIST) of labels heading the current handlers for
   nonlocal gotos.  */

rtx nonlocal_goto_handler_labels;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;
/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
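
/* For example (an editorial illustration, not part of the original source):
   in the GNU C statement expression

     r = ({ struct big tmp = f (); g (&tmp); tmp; }).field;

   the temporary holding the expression's value is created at the inner
   nesting level but must survive past the closing brace, so it is
   preserved by pretending it belongs to the enclosing level.  */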
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inline function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
                                              int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int,
                                      struct hash_table *));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
                                  struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int, struct hash_table *));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int,
                                     struct hash_table *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
                                                       struct hash_table *,
                                                       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->has_computed_jump = current_function_has_computed_jump;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
  p->nonlocal_goto_handler_labels = nonlocal_goto_handler_labels;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}
void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_has_computed_jump = p->has_computed_jump;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
  nonlocal_goto_handler_labels = p->nonlocal_goto_handler_labels;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
                    queue->unsignedp, 0);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
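
#if 0
/* Editorial illustration of the ALIGN argument (not part of the original
   source; the modes and sizes are made-up examples).  */
static void
example_assign_stack_local ()
{
  rtx x;

  /* ALIGN == 0: align according to MODE.  */
  x = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  /* ALIGN == -1: use BIGGEST_ALIGNMENT and round SIZE up to it.  */
  x = assign_stack_local (BLKmode, 33, -1);
  /* Positive ALIGN: an explicit boundary in bits, here 64.  */
  x = assign_stack_local (BLKmode, 16, 64);
}
#endif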
/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && (!flag_strict_aliasing
            || (alias_set && p->alias_set == alias_set))
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
          /* We can't split slots if -fstrict-aliasing because the
             information about the alias set for the new slot will be
             lost.  */
          && !flag_strict_aliasing)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < (BIGGEST_ALIGNMENT / BITS_PER_UNIT))
        abort ();
      p->slot = assign_stack_local (mode,
                                    mode == BLKmode
                                    ? CEIL_ROUND (size, align) : size,
                                    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;
  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;
  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
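
/* Worked example (editorial, not from the original source): two free
   BLKmode slots with { base_offset 0, full_size 16 } and { base_offset 16,
   full_size 8 } are adjacent, so the second is merged into the first,
   leaving one free slot with { base_offset 0, full_size 24 } that a later
   request can reuse or split.  */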
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}
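
/* Illustrative example (editorial, not from the original source): an
   address of the form (plus (reg virtual-stack-vars) (const_int 20))
   matches a slot with base_offset 16 and full_size 8, since 16 <= 20 < 24,
   even if that exact PLUS rtx was never recorded as one of the slot's
   known addresses.  */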
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
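
#if 0
/* Editorial sketch of the level discipline (not part of the original
   source; the call sequence is an assumption about a typical caller,
   not a claim about any specific one in the compiler).  A caller
   brackets the expansion of one statement with a temporary level so
   that slots allocated for the statement die when it ends.  */
static void
example_statement_expansion ()
{
  rtx temp;

  push_temp_slots ();
  temp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
  /* ... emit insns that compute into TEMP ... */
  free_temp_slots ();     /* TEMP has keep == 0 at this level: freed.  */
  pop_temp_slots ();
}
#endif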
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                            0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);
      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), Pmode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
                       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
1739 static void
1740 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1741 rtx var;
1742 enum machine_mode promoted_mode;
1743 int unsignedp;
1744 struct hash_table *ht;
1746 tree pending;
1747 rtx first_insn = get_insns ();
1748 struct sequence_stack *stack = sequence_stack;
1749 tree rtl_exps = rtl_expr_chain;
1751 /* Must scan all insns for stack-refs that exceed the limit. */
1752 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1753 stack == 0, ht);
1754 /* If there's a hash table, it must record all uses of VAR. */
1755 if (ht)
1756 return;
1758 /* Scan all pending sequences too. */
1759 for (; stack; stack = stack->next)
1761 push_to_sequence (stack->first);
1762 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1763 stack->first, stack->next != 0, 0);
1764 /* Update remembered end of sequence
1765 in case we added an insn at the end. */
1766 stack->last = get_last_insn ();
1767 end_sequence ();
1770 /* Scan all waiting RTL_EXPRs too. */
1771 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1773 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1774 if (seq != const0_rtx && seq != 0)
1776 push_to_sequence (seq);
1777 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1779 end_sequence ();
1783 /* Scan the catch clauses for exception handling too. */
1784 push_to_sequence (catch_clauses);
1785 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
1786 0, 0);
1787 end_sequence ();
1790 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
1791 and X is some part of an insn. Return a struct fixup_replacement whose OLD
1792 value is equal to X. Allocate a new structure if no such entry exists. */
1794 static struct fixup_replacement *
1795 find_fixup_replacement (replacements, x)
1796 struct fixup_replacement **replacements;
1797 rtx x;
1799 struct fixup_replacement *p;
1801 /* See if we have already replaced this. */
1802 for (p = *replacements; p && p->old != x; p = p->next)
1805 if (p == 0)
1807 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1808 p->old = x;
1809 p->new = 0;
1810 p->next = *replacements;
1811 *replacements = p;
1814 return p;
1817 /* Scan the insn-chain starting with INSN for refs to VAR
1818 and fix them up. TOPLEVEL is nonzero if this chain is the
1819 main chain of insns for the current function. */
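/* For concreteness, a hypothetical case: if VAR was (reg:SI 42) and
   has just been turned into a MEM, an insn such as

     (set (reg:SI 50) (plus:SI (reg:SI 42) (const_int 1)))

   may no longer be recognizable with a memory operand, so the fixup
   below loads the MEM into a fresh pseudo and substitutes that.  */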
1821 static void
1822 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1823 rtx var;
1824 enum machine_mode promoted_mode;
1825 int unsignedp;
1826 rtx insn;
1827 int toplevel;
1828 struct hash_table *ht;
1830 rtx call_dest = 0;
1831 rtx insn_list = NULL_RTX;
1833 /* If we already know which INSNs reference VAR there's no need
1834 to walk the entire instruction chain. */
1835 if (ht)
1837 insn_list = ((struct insns_for_mem_entry *)
1838 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1839 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1840 insn_list = XEXP (insn_list, 1);
1843 while (insn)
1845 rtx next = NEXT_INSN (insn);
1846 rtx set, prev, prev_set;
1847 rtx note;
1849 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1851 /* If this is a CLOBBER of VAR, delete it.
1853 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1854 and REG_RETVAL notes too. */
1855 if (GET_CODE (PATTERN (insn)) == CLOBBER
1856 && (XEXP (PATTERN (insn), 0) == var
1857 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1858 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1859 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1861 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1862 /* The REG_LIBCALL note will go away since we are going to
1863 turn INSN into a NOTE, so just delete the
1864 corresponding REG_RETVAL note. */
1865 remove_note (XEXP (note, 0),
1866 find_reg_note (XEXP (note, 0), REG_RETVAL,
1867 NULL_RTX));
1869 /* In unoptimized compilation, we shouldn't call delete_insn
1870 except in jump.c doing warnings. */
1871 PUT_CODE (insn, NOTE);
1872 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1873 NOTE_SOURCE_FILE (insn) = 0;
1876 /* The insn to load VAR from a home in the arglist
1877 is now a no-op. When we see it, just delete it.
1878 Similarly if this is storing VAR from a register from which
1879 it was loaded in the previous insn. This will occur
1880 when an ADDRESSOF was made for an arglist slot. */
1881 else if (toplevel
1882 && (set = single_set (insn)) != 0
1883 && SET_DEST (set) == var
1884 /* If this represents the result of an insn group,
1885 don't delete the insn. */
1886 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1887 && (rtx_equal_p (SET_SRC (set), var)
1888 || (GET_CODE (SET_SRC (set)) == REG
1889 && (prev = prev_nonnote_insn (insn)) != 0
1890 && (prev_set = single_set (prev)) != 0
1891 && SET_DEST (prev_set) == SET_SRC (set)
1892 && rtx_equal_p (SET_SRC (prev_set), var))))
1894 /* In unoptimized compilation, we shouldn't call delete_insn
1895 except in jump.c doing warnings. */
1896 PUT_CODE (insn, NOTE);
1897 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1898 NOTE_SOURCE_FILE (insn) = 0;
1899 if (insn == last_parm_insn)
1900 last_parm_insn = PREV_INSN (next);
1902 else
1904 struct fixup_replacement *replacements = 0;
1905 rtx next_insn = NEXT_INSN (insn);
1907 if (SMALL_REGISTER_CLASSES)
1909 /* If the insn that copies the results of a CALL_INSN
1910 into a pseudo now references VAR, we have to use an
1911 intermediate pseudo since we want the life of the
1912 return value register to be only a single insn.
1914 If we don't use an intermediate pseudo, such things as
1915 address computations (needed to make the address of VAR
1916 valid if it is not already) could be placed between the CALL_INSN and INSN.
1918 To make sure this doesn't happen, we record the destination
1919 of the CALL_INSN and see if the next insn uses both that
1920 and VAR. */
1922 if (call_dest != 0 && GET_CODE (insn) == INSN
1923 && reg_mentioned_p (var, PATTERN (insn))
1924 && reg_mentioned_p (call_dest, PATTERN (insn)))
1926 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1928 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1930 PATTERN (insn) = replace_rtx (PATTERN (insn),
1931 call_dest, temp);
1934 if (GET_CODE (insn) == CALL_INSN
1935 && GET_CODE (PATTERN (insn)) == SET)
1936 call_dest = SET_DEST (PATTERN (insn));
1937 else if (GET_CODE (insn) == CALL_INSN
1938 && GET_CODE (PATTERN (insn)) == PARALLEL
1939 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1940 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1941 else
1942 call_dest = 0;
1945 /* See if we have to do anything to INSN now that VAR is in
1946 memory. If it needs to be loaded into a pseudo, use a single
1947 pseudo for the entire insn in case there is a MATCH_DUP
1948 between two operands. We pass a pointer to the head of
1949 a list of struct fixup_replacements. If fixup_var_refs_1
1950 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1951 it will record them in this list.
1953 If it allocated a pseudo for any replacement, we copy into
1954 it here. */
1956 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1957 &replacements);
1959 /* If this is last_parm_insn, and any instructions were output
1960 after it to fix it up, then we must set last_parm_insn to
1961 the last such instruction emitted. */
1962 if (insn == last_parm_insn)
1963 last_parm_insn = PREV_INSN (next_insn);
1965 while (replacements)
1967 if (GET_CODE (replacements->new) == REG)
1969 rtx insert_before;
1970 rtx seq;
1972 /* OLD might be a (subreg (mem)). */
1973 if (GET_CODE (replacements->old) == SUBREG)
1974 replacements->old
1975 = fixup_memory_subreg (replacements->old, insn, 0);
1976 else
1977 replacements->old
1978 = fixup_stack_1 (replacements->old, insn);
1980 insert_before = insn;
1982 /* If we are changing the mode, do a conversion.
1983 This might be wasteful, but combine.c will
1984 eliminate much of the waste. */
1986 if (GET_MODE (replacements->new)
1987 != GET_MODE (replacements->old))
1989 start_sequence ();
1990 convert_move (replacements->new,
1991 replacements->old, unsignedp);
1992 seq = gen_sequence ();
1993 end_sequence ();
1995 else
1996 seq = gen_move_insn (replacements->new,
1997 replacements->old);
1999 emit_insn_before (seq, insert_before);
2002 replacements = replacements->next;
2006 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
2007 But don't touch other insns referred to by reg-notes;
2008 we will get them elsewhere. */
2009 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2010 if (GET_CODE (note) != INSN_LIST)
2011 XEXP (note, 0)
2012 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
2015 if (!ht)
2016 insn = next;
2017 else if (insn_list)
2019 insn = XEXP (insn_list, 0);
2020 insn_list = XEXP (insn_list, 1);
2022 else
2023 insn = NULL_RTX;
2027 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
2028 See if the rtx expression at *LOC in INSN needs to be changed.
2030 REPLACEMENTS is a pointer to a list head that starts out zero, but may
2031 contain a list of original rtx's and replacements. If we find that we need
2032 to modify this insn by replacing a memory reference with a pseudo or by
2033 making a new MEM to implement a SUBREG, we consult that list to see if
2034 we have already chosen a replacement. If none has already been allocated,
2035 we allocate it and update the list. fixup_var_refs_insns will copy VAR
2036 or the SUBREG, as appropriate, to the pseudo. */
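/* A sketch of why the shared-replacement list matters: if an insn's
   pattern contains a MATCH_DUP, both occurrences of VAR must be
   replaced by the same rtx, say a hypothetical (reg:SI 60); recording
   each OLD/NEW pair once and reusing it on every later occurrence
   within the insn guarantees that.  */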
2038 static void
2039 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
2040 register rtx var;
2041 enum machine_mode promoted_mode;
2042 register rtx *loc;
2043 rtx insn;
2044 struct fixup_replacement **replacements;
2046 register int i;
2047 register rtx x = *loc;
2048 RTX_CODE code = GET_CODE (x);
2049 register char *fmt;
2050 register rtx tem, tem1;
2051 struct fixup_replacement *replacement;
2053 switch (code)
2055 case ADDRESSOF:
2056 if (XEXP (x, 0) == var)
2058 /* Prevent sharing of rtl that might lose. */
2059 rtx sub = copy_rtx (XEXP (var, 0));
2061 if (! validate_change (insn, loc, sub, 0))
2063 rtx y = gen_reg_rtx (GET_MODE (sub));
2064 rtx seq, new_insn;
2066 /* We should be able to replace with a register or all is lost.
2067 Note that we can't use validate_change to verify this, since
2068 we don't care about replacing all dups simultaneously. */
2069 if (! validate_replace_rtx (*loc, y, insn))
2070 abort ();
2072 /* Careful! First try to recognize a direct move of the
2073 value, mimicking how things are done in gen_reload wrt
2074 PLUS. Consider what happens when insn is a conditional
2075 move instruction and addsi3 clobbers flags. */
2077 start_sequence ();
2078 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
2079 seq = gen_sequence ();
2080 end_sequence ();
2082 if (recog_memoized (new_insn) < 0)
2084 /* That failed. Fall back on force_operand and hope. */
2086 start_sequence ();
2087 force_operand (sub, y);
2088 seq = gen_sequence ();
2089 end_sequence ();
2092 #ifdef HAVE_cc0
2093 /* Don't separate setter from user. */
2094 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
2095 insn = PREV_INSN (insn);
2096 #endif
2098 emit_insn_before (seq, insn);
2101 return;
2103 case MEM:
2104 if (var == x)
2106 /* If we already have a replacement, use it. Otherwise,
2107 try to fix up this address in case it is invalid. */
2109 replacement = find_fixup_replacement (replacements, var);
2110 if (replacement->new)
2112 *loc = replacement->new;
2113 return;
2116 *loc = replacement->new = x = fixup_stack_1 (x, insn);
2118 /* Unless we are forcing memory to register or we changed the mode,
2119 we can leave things the way they are if the insn is valid. */
2121 INSN_CODE (insn) = -1;
2122 if (! flag_force_mem && GET_MODE (x) == promoted_mode
2123 && recog_memoized (insn) >= 0)
2124 return;
2126 *loc = replacement->new = gen_reg_rtx (promoted_mode);
2127 return;
2130 /* If X contains VAR, we need to unshare it here so that we update
2131 each occurrence separately. But all identical MEMs in one insn
2132 must be replaced with the same rtx because of the possibility of
2133 MATCH_DUPs. */
2135 if (reg_mentioned_p (var, x))
2137 replacement = find_fixup_replacement (replacements, x);
2138 if (replacement->new == 0)
2139 replacement->new = copy_most_rtx (x, var);
2141 *loc = x = replacement->new;
2143 break;
2145 case REG:
2146 case CC0:
2147 case PC:
2148 case CONST_INT:
2149 case CONST:
2150 case SYMBOL_REF:
2151 case LABEL_REF:
2152 case CONST_DOUBLE:
2153 return;
2155 case SIGN_EXTRACT:
2156 case ZERO_EXTRACT:
2157 /* Note that in some cases those types of expressions are altered
2158 by optimize_bit_field, and do not survive to get here. */
2159 if (XEXP (x, 0) == var
2160 || (GET_CODE (XEXP (x, 0)) == SUBREG
2161 && SUBREG_REG (XEXP (x, 0)) == var))
2163 /* Get TEM as a valid MEM in the mode presently in the insn.
2165 We don't worry about the possibility of MATCH_DUP here; it
2166 is highly unlikely and would be tricky to handle. */
2168 tem = XEXP (x, 0);
2169 if (GET_CODE (tem) == SUBREG)
2171 if (GET_MODE_BITSIZE (GET_MODE (tem))
2172 > GET_MODE_BITSIZE (GET_MODE (var)))
2174 replacement = find_fixup_replacement (replacements, var);
2175 if (replacement->new == 0)
2176 replacement->new = gen_reg_rtx (GET_MODE (var));
2177 SUBREG_REG (tem) = replacement->new;
2179 else
2180 tem = fixup_memory_subreg (tem, insn, 0);
2182 else
2183 tem = fixup_stack_1 (tem, insn);
2185 /* Unless we want to load from memory, get TEM into the proper mode
2186 for an extract from memory. This can only be done if the
2187 extract is at a constant position and length. */
2189 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2190 && GET_CODE (XEXP (x, 2)) == CONST_INT
2191 && ! mode_dependent_address_p (XEXP (tem, 0))
2192 && ! MEM_VOLATILE_P (tem))
2194 enum machine_mode wanted_mode = VOIDmode;
2195 enum machine_mode is_mode = GET_MODE (tem);
2196 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2198 #ifdef HAVE_extzv
2199 if (GET_CODE (x) == ZERO_EXTRACT)
2201 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
2202 if (wanted_mode == VOIDmode)
2203 wanted_mode = word_mode;
2205 #endif
2206 #ifdef HAVE_extv
2207 if (GET_CODE (x) == SIGN_EXTRACT)
2209 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
2210 if (wanted_mode == VOIDmode)
2211 wanted_mode = word_mode;
2213 #endif
2214 /* If we have a narrower mode, we can do something. */
2215 if (wanted_mode != VOIDmode
2216 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2218 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2219 rtx old_pos = XEXP (x, 2);
2220 rtx newmem;
2222 /* If the bytes and bits are counted differently, we
2223 must adjust the offset. */
2224 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2225 offset = (GET_MODE_SIZE (is_mode)
2226 - GET_MODE_SIZE (wanted_mode) - offset);
2228 pos %= GET_MODE_BITSIZE (wanted_mode);
2230 newmem = gen_rtx_MEM (wanted_mode,
2231 plus_constant (XEXP (tem, 0), offset));
2232 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2233 MEM_COPY_ATTRIBUTES (newmem, tem);
2235 /* Make the change and see if the insn remains valid. */
2236 INSN_CODE (insn) = -1;
2237 XEXP (x, 0) = newmem;
2238 XEXP (x, 2) = GEN_INT (pos);
2240 if (recog_memoized (insn) >= 0)
2241 return;
2243 /* Otherwise, restore old position. XEXP (x, 0) will be
2244 restored later. */
2245 XEXP (x, 2) = old_pos;
2249 /* If we get here, the bitfield extract insn can't accept a memory
2250 reference. Copy the input into a register. */
2252 tem1 = gen_reg_rtx (GET_MODE (tem));
2253 emit_insn_before (gen_move_insn (tem1, tem), insn);
2254 XEXP (x, 0) = tem1;
2255 return;
2257 break;
2259 case SUBREG:
2260 if (SUBREG_REG (x) == var)
2262 /* If this is a special SUBREG made because VAR was promoted
2263 from a wider mode, replace it with VAR and call ourself
2264 recursively, this time saying that the object previously
2265 had its current mode (by virtue of the SUBREG). */
2267 if (SUBREG_PROMOTED_VAR_P (x))
2269 *loc = var;
2270 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2271 return;
2274 /* If this SUBREG makes VAR wider, it has become a paradoxical
2275 SUBREG with VAR in memory, but these aren't allowed at this
2276 stage of the compilation. So load VAR into a pseudo and take
2277 a SUBREG of that pseudo. */
2278 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2280 replacement = find_fixup_replacement (replacements, var);
2281 if (replacement->new == 0)
2282 replacement->new = gen_reg_rtx (GET_MODE (var));
2283 SUBREG_REG (x) = replacement->new;
2284 return;
2287 /* See if we have already found a replacement for this SUBREG.
2288 If so, use it. Otherwise, make a MEM and see if the insn
2289 is recognized. If not, or if we should force MEM into a register,
2290 make a pseudo for this SUBREG. */
2291 replacement = find_fixup_replacement (replacements, x);
2292 if (replacement->new)
2294 *loc = replacement->new;
2295 return;
2298 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2300 INSN_CODE (insn) = -1;
2301 if (! flag_force_mem && recog_memoized (insn) >= 0)
2302 return;
2304 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2305 return;
2307 break;
2309 case SET:
2310 /* First do special simplification of bit-field references. */
2311 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2312 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2313 optimize_bit_field (x, insn, 0);
2314 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2315 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2316 optimize_bit_field (x, insn, NULL_PTR);
2318 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2319 into a register and then store it back out. */
2320 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2321 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2322 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2323 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2324 > GET_MODE_SIZE (GET_MODE (var))))
2326 replacement = find_fixup_replacement (replacements, var);
2327 if (replacement->new == 0)
2328 replacement->new = gen_reg_rtx (GET_MODE (var));
2330 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2331 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2334 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2335 insn into a pseudo and store the low part of the pseudo into VAR. */
2336 if (GET_CODE (SET_DEST (x)) == SUBREG
2337 && SUBREG_REG (SET_DEST (x)) == var
2338 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2339 > GET_MODE_SIZE (GET_MODE (var))))
2341 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2342 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2343 tem)),
2344 insn);
2345 break;
2349 rtx dest = SET_DEST (x);
2350 rtx src = SET_SRC (x);
2351 #ifdef HAVE_insv
2352 rtx outerdest = dest;
2353 #endif
2355 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2356 || GET_CODE (dest) == SIGN_EXTRACT
2357 || GET_CODE (dest) == ZERO_EXTRACT)
2358 dest = XEXP (dest, 0);
2360 if (GET_CODE (src) == SUBREG)
2361 src = XEXP (src, 0);
2363 /* If VAR does not appear at the top level of the SET,
2364 just scan the lower levels of the tree. */
2366 if (src != var && dest != var)
2367 break;
2369 /* We will need to rerecognize this insn. */
2370 INSN_CODE (insn) = -1;
2372 #ifdef HAVE_insv
2373 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2375 /* Since this case will return, ensure we fixup all the
2376 operands here. */
2377 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2378 insn, replacements);
2379 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2380 insn, replacements);
2381 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2382 insn, replacements);
2384 tem = XEXP (outerdest, 0);
2386 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2387 that may appear inside a ZERO_EXTRACT.
2388 This was legitimate when the MEM was a REG. */
2389 if (GET_CODE (tem) == SUBREG
2390 && SUBREG_REG (tem) == var)
2391 tem = fixup_memory_subreg (tem, insn, 0);
2392 else
2393 tem = fixup_stack_1 (tem, insn);
2395 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2396 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2397 && ! mode_dependent_address_p (XEXP (tem, 0))
2398 && ! MEM_VOLATILE_P (tem))
2400 enum machine_mode wanted_mode;
2401 enum machine_mode is_mode = GET_MODE (tem);
2402 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2404 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2405 if (wanted_mode == VOIDmode)
2406 wanted_mode = word_mode;
2408 /* If we have a narrower mode, we can do something. */
2409 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2411 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2412 rtx old_pos = XEXP (outerdest, 2);
2413 rtx newmem;
2415 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2416 offset = (GET_MODE_SIZE (is_mode)
2417 - GET_MODE_SIZE (wanted_mode) - offset);
2419 pos %= GET_MODE_BITSIZE (wanted_mode);
2421 newmem = gen_rtx_MEM (wanted_mode,
2422 plus_constant (XEXP (tem, 0), offset));
2423 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2424 MEM_COPY_ATTRIBUTES (newmem, tem);
2426 /* Make the change and see if the insn remains valid. */
2427 INSN_CODE (insn) = -1;
2428 XEXP (outerdest, 0) = newmem;
2429 XEXP (outerdest, 2) = GEN_INT (pos);
2431 if (recog_memoized (insn) >= 0)
2432 return;
2434 /* Otherwise, restore old position. XEXP (x, 0) will be
2435 restored later. */
2436 XEXP (outerdest, 2) = old_pos;
2440 /* If we get here, the bit-field store doesn't allow memory
2441 or isn't located at a constant position. Load the value into
2442 a register, do the store, and put it back into memory. */
2444 tem1 = gen_reg_rtx (GET_MODE (tem));
2445 emit_insn_before (gen_move_insn (tem1, tem), insn);
2446 emit_insn_after (gen_move_insn (tem, tem1), insn);
2447 XEXP (outerdest, 0) = tem1;
2448 return;
2450 #endif
2452 /* STRICT_LOW_PART is a no-op on memory references
2453 and it can cause combinations to be unrecognizable,
2454 so eliminate it. */
2456 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2457 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2459 /* A valid insn to copy VAR into or out of a register
2460 must be left alone, to avoid an infinite loop here.
2461 If the reference to VAR is by a subreg, fix that up,
2462 since SUBREG is not valid for a memref.
2463 Also fix up the address of the stack slot.
2465 Note that we must not try to recognize the insn until
2466 after we know that we have valid addresses and no
2467 (subreg (mem ...) ...) constructs, since these interfere
2468 with determining the validity of the insn. */
2470 if ((SET_SRC (x) == var
2471 || (GET_CODE (SET_SRC (x)) == SUBREG
2472 && SUBREG_REG (SET_SRC (x)) == var))
2473 && (GET_CODE (SET_DEST (x)) == REG
2474 || (GET_CODE (SET_DEST (x)) == SUBREG
2475 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2476 && GET_MODE (var) == promoted_mode
2477 && x == single_set (insn))
2479 rtx pat;
2481 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2482 if (replacement->new)
2483 SET_SRC (x) = replacement->new;
2484 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2485 SET_SRC (x) = replacement->new
2486 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2487 else
2488 SET_SRC (x) = replacement->new
2489 = fixup_stack_1 (SET_SRC (x), insn);
2491 if (recog_memoized (insn) >= 0)
2492 return;
2494 /* INSN is not valid, but we know that we want to
2495 copy SET_SRC (x) to SET_DEST (x) in some way. So
2496 we generate the move and see whether it requires more
2497 than one insn. If it does, we emit those insns and
2498 delete INSN. Otherwise, we can just replace the pattern
2499 of INSN; we have already verified above that INSN has
2500 no other function than to do X. */
2502 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2503 if (GET_CODE (pat) == SEQUENCE)
2505 emit_insn_after (pat, insn);
2506 PUT_CODE (insn, NOTE);
2507 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2508 NOTE_SOURCE_FILE (insn) = 0;
2510 else
2511 PATTERN (insn) = pat;
2513 return;
2516 if ((SET_DEST (x) == var
2517 || (GET_CODE (SET_DEST (x)) == SUBREG
2518 && SUBREG_REG (SET_DEST (x)) == var))
2519 && (GET_CODE (SET_SRC (x)) == REG
2520 || (GET_CODE (SET_SRC (x)) == SUBREG
2521 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2522 && GET_MODE (var) == promoted_mode
2523 && x == single_set (insn))
2525 rtx pat;
2527 if (GET_CODE (SET_DEST (x)) == SUBREG)
2528 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2529 else
2530 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2532 if (recog_memoized (insn) >= 0)
2533 return;
2535 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2536 if (GET_CODE (pat) == SEQUENCE)
2538 emit_insn_after (pat, insn);
2539 PUT_CODE (insn, NOTE);
2540 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2541 NOTE_SOURCE_FILE (insn) = 0;
2543 else
2544 PATTERN (insn) = pat;
2546 return;
2549 /* Otherwise, storing into VAR must be handled specially
2550 by storing into a temporary and copying that into VAR
2551 with a new insn after this one. Note that this case
2552 will be used when storing into a promoted scalar since
2553 the insn will now have different modes on the input
2554 and output and hence will be invalid (except for the case
2555 of setting it to a constant, which does not need any
2556 change if it is valid). We generate extra code in that case,
2557 but combine.c will eliminate it. */
2559 if (dest == var)
2561 rtx temp;
2562 rtx fixeddest = SET_DEST (x);
2564 /* STRICT_LOW_PART can be discarded around a MEM. */
2565 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2566 fixeddest = XEXP (fixeddest, 0);
2567 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2568 if (GET_CODE (fixeddest) == SUBREG)
2570 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2571 promoted_mode = GET_MODE (fixeddest);
2573 else
2574 fixeddest = fixup_stack_1 (fixeddest, insn);
2576 temp = gen_reg_rtx (promoted_mode);
2578 emit_insn_after (gen_move_insn (fixeddest,
2579 gen_lowpart (GET_MODE (fixeddest),
2580 temp)),
2581 insn);
2583 SET_DEST (x) = temp;
2587 default:
2588 break;
2591 /* Nothing special about this RTX; fix its operands. */
2593 fmt = GET_RTX_FORMAT (code);
2594 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2596 if (fmt[i] == 'e')
2597 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2598 if (fmt[i] == 'E')
2600 register int j;
2601 for (j = 0; j < XVECLEN (x, i); j++)
2602 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2603 insn, replacements);
2608 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2609 return an rtx (MEM:m1 newaddr) which is equivalent.
2610 If any insns must be emitted to compute NEWADDR, put them before INSN.
2612 UNCRITICAL nonzero means accept paradoxical subregs.
2613 This is used for subregs found inside REG_NOTES. */
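/* A worked example, assuming UNITS_PER_WORD == 4:

     (subreg:HI (mem:SI addr) 0)
       ==>  (mem:HI addr)                        if !BYTES_BIG_ENDIAN
       ==>  (mem:HI (plus addr (const_int 2)))   if BYTES_BIG_ENDIAN

   since the low half of an SImode value lives at the higher address
   on a big-endian machine.  */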
2615 static rtx
2616 fixup_memory_subreg (x, insn, uncritical)
2617 rtx x;
2618 rtx insn;
2619 int uncritical;
2621 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2622 rtx addr = XEXP (SUBREG_REG (x), 0);
2623 enum machine_mode mode = GET_MODE (x);
2624 rtx result;
2626 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2627 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2628 && ! uncritical)
2629 abort ();
2631 if (BYTES_BIG_ENDIAN)
2632 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2633 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2634 addr = plus_constant (addr, offset);
2635 if (!flag_force_addr && memory_address_p (mode, addr))
2636 /* Shortcut if no insns need be emitted. */
2637 return change_address (SUBREG_REG (x), mode, addr);
2638 start_sequence ();
2639 result = change_address (SUBREG_REG (x), mode, addr);
2640 emit_insn_before (gen_sequence (), insn);
2641 end_sequence ();
2642 return result;
2645 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2646 Replace subexpressions of X in place.
2647 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2648 Otherwise return X, with its contents possibly altered.
2650 If any insns must be emitted to compute NEWADDR, put them before INSN.
2652 UNCRITICAL is as in fixup_memory_subreg. */
2654 static rtx
2655 walk_fixup_memory_subreg (x, insn, uncritical)
2656 register rtx x;
2657 rtx insn;
2658 int uncritical;
2660 register enum rtx_code code;
2661 register char *fmt;
2662 register int i;
2664 if (x == 0)
2665 return 0;
2667 code = GET_CODE (x);
2669 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2670 return fixup_memory_subreg (x, insn, uncritical);
2672 /* Nothing special about this RTX; fix its operands. */
2674 fmt = GET_RTX_FORMAT (code);
2675 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2677 if (fmt[i] == 'e')
2678 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2679 if (fmt[i] == 'E')
2681 register int j;
2682 for (j = 0; j < XVECLEN (x, i); j++)
2683 XVECEXP (x, i, j)
2684 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2687 return x;
2690 /* For each memory ref within X, if it refers to a stack slot
2691 with an out-of-range displacement, put the address in a temp register
2692 (emitting new insns before INSN to load these registers)
2693 and alter the memory ref to use that register.
2694 Replace each such MEM rtx with a copy, to avoid clobberage. */
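/* A sketch of the case handled here (the displacement is hypothetical):
   on a machine whose addressing modes allow only small offsets,

     (mem:SI (plus (reg frame_pointer) (const_int 40000)))

   is not a valid address, so the sum is computed into a temporary
   register before INSN and the MEM is rewritten to use that register.  */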
2696 static rtx
2697 fixup_stack_1 (x, insn)
2698 rtx x;
2699 rtx insn;
2701 register int i;
2702 register RTX_CODE code = GET_CODE (x);
2703 register char *fmt;
2705 if (code == MEM)
2707 register rtx ad = XEXP (x, 0);
2708 /* If we have address of a stack slot but it's not valid
2709 (displacement is too large), compute the sum in a register. */
2710 if (GET_CODE (ad) == PLUS
2711 && GET_CODE (XEXP (ad, 0)) == REG
2712 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2713 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2714 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2715 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2716 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2717 #endif
2718 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2719 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2720 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2721 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2723 rtx temp, seq;
2724 if (memory_address_p (GET_MODE (x), ad))
2725 return x;
2727 start_sequence ();
2728 temp = copy_to_reg (ad);
2729 seq = gen_sequence ();
2730 end_sequence ();
2731 emit_insn_before (seq, insn);
2732 return change_address (x, VOIDmode, temp);
2734 return x;
2737 fmt = GET_RTX_FORMAT (code);
2738 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2740 if (fmt[i] == 'e')
2741 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2742 if (fmt[i] == 'E')
2744 register int j;
2745 for (j = 0; j < XVECLEN (x, i); j++)
2746 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2749 return x;
2752 /* Optimization: a bit-field instruction whose field
2753 happens to be a byte or halfword in memory
2754 can be changed to a move instruction.
2756 We call here when INSN is an insn to examine or store into a bit-field.
2757 BODY is the SET-rtx to be altered.
2759 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2760 (Currently this is called only from function.c, and EQUIV_MEM
2761 is always 0.) */
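/* A worked example, assuming BITS_BIG_ENDIAN == BYTES_BIG_ENDIAN and a
   byte-aligned field (the register number is hypothetical):

     (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
          (reg:SI 42))
       ==>  (set (mem:QI (plus addr (const_int 1)))
                 (subreg:QI (reg:SI 42) 0))

   so the bit-field insertion becomes a plain byte store.  */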
2763 static void
2764 optimize_bit_field (body, insn, equiv_mem)
2765 rtx body;
2766 rtx insn;
2767 rtx *equiv_mem;
2769 register rtx bitfield;
2770 int destflag;
2771 rtx seq = 0;
2772 enum machine_mode mode;
2774 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2775 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2776 bitfield = SET_DEST (body), destflag = 1;
2777 else
2778 bitfield = SET_SRC (body), destflag = 0;
2780 /* First check that the field being stored has constant size and position
2781 and is in fact a byte or halfword suitably aligned. */
2783 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2784 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2785 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2786 != BLKmode)
2787 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2789 register rtx memref = 0;
2791 /* Now check that the containing word is memory, not a register,
2792 and that it is safe to change the machine mode. */
2794 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2795 memref = XEXP (bitfield, 0);
2796 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2797 && equiv_mem != 0)
2798 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2799 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2800 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2801 memref = SUBREG_REG (XEXP (bitfield, 0));
2802 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2803 && equiv_mem != 0
2804 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2805 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2807 if (memref
2808 && ! mode_dependent_address_p (XEXP (memref, 0))
2809 && ! MEM_VOLATILE_P (memref))
2811 /* Now adjust the address, first for any subreg'ing
2812 that we are now getting rid of,
2813 and then for which byte of the word is wanted. */
2815 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2816 rtx insns;
2818 /* Adjust OFFSET to count bits from low-address byte. */
2819 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2820 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2821 - offset - INTVAL (XEXP (bitfield, 1)));
2823 /* Adjust OFFSET to count bytes from low-address byte. */
2824 offset /= BITS_PER_UNIT;
2825 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2827 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2828 if (BYTES_BIG_ENDIAN)
2829 offset -= (MIN (UNITS_PER_WORD,
2830 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2831 - MIN (UNITS_PER_WORD,
2832 GET_MODE_SIZE (GET_MODE (memref))));
2835 start_sequence ();
2836 memref = change_address (memref, mode,
2837 plus_constant (XEXP (memref, 0), offset));
2838 insns = get_insns ();
2839 end_sequence ();
2840 emit_insns_before (insns, insn);
2842 /* Store this memory reference where
2843 we found the bit field reference. */
2845 if (destflag)
2847 validate_change (insn, &SET_DEST (body), memref, 1);
2848 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2850 rtx src = SET_SRC (body);
2851 while (GET_CODE (src) == SUBREG
2852 && SUBREG_WORD (src) == 0)
2853 src = SUBREG_REG (src);
2854 if (GET_MODE (src) != GET_MODE (memref))
2855 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2856 validate_change (insn, &SET_SRC (body), src, 1);
2858 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2859 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2860 /* This shouldn't happen because anything that didn't have
2861 one of these modes should have got converted explicitly
2862 and then referenced through a subreg.
2863 This is so because the original bit-field was
2864 handled by agg_mode and so its tree structure had
2865 the same mode that memref now has. */
2866 abort ();
2868 else
2870 rtx dest = SET_DEST (body);
2872 while (GET_CODE (dest) == SUBREG
2873 && SUBREG_WORD (dest) == 0
2874 && (GET_MODE_CLASS (GET_MODE (dest))
2875 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2876 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2877 <= UNITS_PER_WORD))
2878 dest = SUBREG_REG (dest);
2880 validate_change (insn, &SET_DEST (body), dest, 1);
2882 if (GET_MODE (dest) == GET_MODE (memref))
2883 validate_change (insn, &SET_SRC (body), memref, 1);
2884 else
2886 /* Convert the mem ref to the destination mode. */
2887 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2889 start_sequence ();
2890 convert_move (newreg, memref,
2891 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2892 seq = get_insns ();
2893 end_sequence ();
2895 validate_change (insn, &SET_SRC (body), newreg, 1);
2899 /* See if we can convert this extraction or insertion into
2900 a simple move insn. We might not be able to do so if this
2901 was, for example, part of a PARALLEL.
2903 If we succeed, write out any needed conversions. If we fail,
2904 it is hard to guess why we failed, so don't do anything
2905 special; just let the optimization be suppressed. */
2907 if (apply_change_group () && seq)
2908 emit_insns_before (seq, insn);
2913 /* These routines are responsible for converting virtual register references
2914 to the actual hard register references once RTL generation is complete.
2916 The following five variables are used for communication between the
2917 routines. They contain the offsets of the virtual registers from their
2918 respective hard registers. */
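/* For example, following the uses later in this file:
   VIRTUAL_INCOMING_ARGS_REGNUM instantiates to the arg pointer plus
   in_arg_offset, VIRTUAL_STACK_VARS_REGNUM to the frame pointer plus
   var_offset, VIRTUAL_STACK_DYNAMIC_REGNUM to the stack pointer plus
   dynamic_offset, and VIRTUAL_OUTGOING_ARGS_REGNUM to the stack
   pointer plus out_arg_offset.  */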
2920 static int in_arg_offset;
2921 static int var_offset;
2922 static int dynamic_offset;
2923 static int out_arg_offset;
2924 static int cfa_offset;
2926 /* In most machines, the stack pointer register is equivalent to the bottom
2927 of the stack. */
2929 #ifndef STACK_POINTER_OFFSET
2930 #define STACK_POINTER_OFFSET 0
2931 #endif
2933 /* If not defined, pick an appropriate default for the offset of dynamically
2934 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2935 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2937 #ifndef STACK_DYNAMIC_OFFSET
2939 #ifdef ACCUMULATE_OUTGOING_ARGS
2940 /* The bottom of the stack points to the actual arguments. If
2941 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2942 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2943 stack space for register parameters is not pushed by the caller, but
2944 rather part of the fixed stack areas and hence not included in
2945 `current_function_outgoing_args_size'. Nevertheless, we must allow
2946 for it when allocating stack dynamic objects. */
2948 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2949 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2950 (current_function_outgoing_args_size \
2951 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2953 #else
2954 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2955 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2956 #endif
2958 #else
2959 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2960 #endif
2961 #endif
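/* A concrete (hypothetical) instance: with ACCUMULATE_OUTGOING_ARGS
   defined, REG_PARM_STACK_SPACE not defined, 16 bytes of outgoing
   arguments, and a STACK_POINTER_OFFSET of 0, STACK_DYNAMIC_OFFSET
   expands to 16, so dynamic allocations start 16 bytes beyond the
   stack pointer.  */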
2963 /* On a few machines, the CFA coincides with the arg pointer. */
2965 #ifndef ARG_POINTER_CFA_OFFSET
2966 #define ARG_POINTER_CFA_OFFSET 0
2967 #endif
2970 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2971 its address taken. DECL is the decl for the object stored in the
2972 register, for later use if we do need to force REG into the stack.
2973 REG is overwritten by the MEM like in put_reg_into_stack. */
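/* An illustrative result (register numbers hypothetical): for a decl
   of mode SImode living in (reg:SI 42), REG is rewritten in place as

     (mem:SI (addressof:Pmode (reg:SI 77) 42))

   where 77 is a freshly allocated pseudo that stands in for the value
   if the ADDRESSOF is later purged, and 42 records the original
   register number.  */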
2975 rtx
2976 gen_mem_addressof (reg, decl)
2977 rtx reg;
2978 tree decl;
2980 tree type = TREE_TYPE (decl);
2981 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2982 SET_ADDRESSOF_DECL (r, decl);
2983 /* If the original REG was a user-variable, then so is the REG whose
2984 address is being taken. */
2985 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2987 XEXP (reg, 0) = r;
2988 PUT_CODE (reg, MEM);
2989 PUT_MODE (reg, DECL_MODE (decl));
2990 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2991 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2992 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2994 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2995 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2997 return reg;
3000 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
3002 void
3003 flush_addressof (decl)
3004 tree decl;
3006 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
3007 && DECL_RTL (decl) != 0
3008 && GET_CODE (DECL_RTL (decl)) == MEM
3009 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
3010 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
3011 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
3014 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
3016 static void
3017 put_addressof_into_stack (r, ht)
3018 rtx r;
3019 struct hash_table *ht;
3021 tree decl = ADDRESSOF_DECL (r);
3022 rtx reg = XEXP (r, 0);
3024 if (GET_CODE (reg) != REG)
3025 abort ();
3027 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
3028 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
3029 ADDRESSOF_REGNO (r),
3030 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
3033 /* List of replacements made below in purge_addressof_1 when creating
3034 bitfield insertions. */
3035 static rtx purge_bitfield_addressof_replacements;
3037 /* List of replacements made below in purge_addressof_1 for patterns
3038 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
3039 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
3040 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3041 enough in complex cases, e.g. when some field values can be
3042 extracted by using a MEM with a narrower mode. */
3043 static rtx purge_addressof_replacements;
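/* The shape of both lists, as built below (KEY/VALUE are placeholders):
   each entry occupies two EXPR_LIST nodes, so a chain looks like

     (expr_list KEY1 (expr_list VALUE1
       (expr_list KEY2 (expr_list VALUE2 ...))))

   which is why the walkers advance with XEXP (XEXP (tem, 1), 1).  */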
3045 /* Helper function for purge_addressof. See if the rtx expression at *LOC
3046 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
3047 the stack. */
3049 static void
3050 purge_addressof_1 (loc, insn, force, store, ht)
3051 rtx *loc;
3052 rtx insn;
3053 int force, store;
3054 struct hash_table *ht;
3056 rtx x;
3057 RTX_CODE code;
3058 int i, j;
3059 char *fmt;
3061 /* Re-start here to avoid recursion in common cases. */
3062 restart:
3064 x = *loc;
3065 if (x == 0)
3066 return;
3068 code = GET_CODE (x);
3070 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
3072 rtx insns;
3073 /* We must create a copy of the rtx because it was created by
3074 overwriting a REG rtx which is always shared. */
3075 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3077 if (validate_change (insn, loc, sub, 0)
3078 || validate_replace_rtx (x, sub, insn))
3079 return;
3081 start_sequence ();
3082 sub = force_operand (sub, NULL_RTX);
3083 if (! validate_change (insn, loc, sub, 0)
3084 && ! validate_replace_rtx (x, sub, insn))
3085 abort ();
3087 insns = gen_sequence ();
3088 end_sequence ();
3089 emit_insn_before (insns, insn);
3090 return;
3092 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3094 rtx sub = XEXP (XEXP (x, 0), 0);
3095 rtx sub2;
3097 if (GET_CODE (sub) == MEM)
3099 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3100 MEM_COPY_ATTRIBUTES (sub2, sub);
3101 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
3102 sub = sub2;
3105 if (GET_CODE (sub) == REG
3106 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3108 put_addressof_into_stack (XEXP (x, 0), ht);
3109 return;
3111 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3113 int size_x, size_sub;
3115 if (!insn)
3117 /* When processing REG_NOTES, look at the list of
3118 replacements done on the insn to find the register that X
3119 was replaced by. */
3120 rtx tem;
3122 for (tem = purge_bitfield_addressof_replacements;
3123 tem != NULL_RTX;
3124 tem = XEXP (XEXP (tem, 1), 1))
3125 if (rtx_equal_p (x, XEXP (tem, 0)))
3127 *loc = XEXP (XEXP (tem, 1), 0);
3128 return;
3131 /* See comment for purge_addressof_replacements. */
3132 for (tem = purge_addressof_replacements;
3133 tem != NULL_RTX;
3134 tem = XEXP (XEXP (tem, 1), 1))
3135 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3137 rtx z = XEXP (XEXP (tem, 1), 0);
3139 if (GET_MODE (x) == GET_MODE (z)
3140 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3141 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3142 abort ();
3144 /* It can happen that the note may speak of things
3145 in a wider (or just different) mode than the
3146 code did. This is especially true of
3147 REG_RETVAL. */
3149 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3150 z = SUBREG_REG (z);
3152 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3153 && (GET_MODE_SIZE (GET_MODE (x))
3154 > GET_MODE_SIZE (GET_MODE (z))))
3156 /* This can occur as a result of invalid
3157 pointer casts, e.g. float f; ...
3158 *(long long int *)&f.
3159 ??? We could emit a warning here, but
3160 without a line number that wouldn't be
3161 very helpful. */
3162 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3164 else
3165 z = gen_lowpart (GET_MODE (x), z);
3167 *loc = z;
3168 return;
3171 /* There should always be such a replacement. */
3172 abort ();
3175 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3176 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3178 /* Don't even consider working with paradoxical subregs,
3179 or the moral equivalent seen here. */
3180 if (size_x <= size_sub
3181 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3183 /* Do a bitfield insertion to mirror what would happen
3184 in memory. */
3186 rtx val, seq;
3188 if (store)
3190 rtx p = PREV_INSN (insn);
3192 start_sequence ();
3193 val = gen_reg_rtx (GET_MODE (x));
3194 if (! validate_change (insn, loc, val, 0))
3196 /* Discard the current sequence and put the
3197 ADDRESSOF on stack. */
3198 end_sequence ();
3199 goto give_up;
3201 seq = gen_sequence ();
3202 end_sequence ();
3203 emit_insn_before (seq, insn);
3204 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3205 insn, ht);
3207 start_sequence ();
3208 store_bit_field (sub, size_x, 0, GET_MODE (x),
3209 val, GET_MODE_SIZE (GET_MODE (sub)),
3210 GET_MODE_SIZE (GET_MODE (sub)));
3212 /* Make sure to unshare any shared rtl that store_bit_field
3213 might have created. */
3214 for (p = get_insns(); p; p = NEXT_INSN (p))
3216 reset_used_flags (PATTERN (p));
3217 reset_used_flags (REG_NOTES (p));
3218 reset_used_flags (LOG_LINKS (p));
3220 unshare_all_rtl (get_insns ());
3222 seq = gen_sequence ();
3223 end_sequence ();
3224 p = emit_insn_after (seq, insn);
3225 if (NEXT_INSN (insn))
3226 compute_insns_for_mem (NEXT_INSN (insn),
3227 p ? NEXT_INSN (p) : NULL_RTX,
3228 ht);
3230 else
3232 rtx p = PREV_INSN (insn);
3234 start_sequence ();
3235 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3236 GET_MODE (x), GET_MODE (x),
3237 GET_MODE_SIZE (GET_MODE (sub)),
3238 GET_MODE_SIZE (GET_MODE (sub)));
3240 if (! validate_change (insn, loc, val, 0))
3242 /* Discard the current sequence and put the
3243 ADDRESSOF on stack. */
3244 end_sequence ();
3245 goto give_up;
3248 seq = gen_sequence ();
3249 end_sequence ();
3250 emit_insn_before (seq, insn);
3251 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3252 insn, ht);
3255 /* Remember the replacement so that the same one can be done
3256 on the REG_NOTES. */
3257 purge_bitfield_addressof_replacements
3258 = gen_rtx_EXPR_LIST (VOIDmode, x,
3259 gen_rtx_EXPR_LIST
3260 (VOIDmode, val,
3261 purge_bitfield_addressof_replacements));
3263 /* We replaced with a reg -- all done. */
3264 return;
3267 else if (validate_change (insn, loc, sub, 0))
3269 /* Remember the replacement so that the same one can be done
3270 on the REG_NOTES. */
3271 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3273 rtx tem;
3275 for (tem = purge_addressof_replacements;
3276 tem != NULL_RTX;
3277 tem = XEXP (XEXP (tem, 1), 1))
3278 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3280 XEXP (XEXP (tem, 1), 0) = sub;
3281 return;
3283 purge_addressof_replacements
3284 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3285 gen_rtx_EXPR_LIST (VOIDmode, sub,
3286 purge_addressof_replacements));
3287 return;
3289 goto restart;
3291 give_up:;
3292 /* else give up and put it into the stack */
3294 else if (code == ADDRESSOF)
3296 put_addressof_into_stack (x, ht);
3297 return;
3299 else if (code == SET)
3301 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3302 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3303 return;
3306 /* Scan all subexpressions. */
3307 fmt = GET_RTX_FORMAT (code);
3308 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3310 if (*fmt == 'e')
3311 purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3312 else if (*fmt == 'E')
3313 for (j = 0; j < XVECLEN (x, i); j++)
3314 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3318 /* Return a new hash table entry in HT. */
3320 static struct hash_entry *
3321 insns_for_mem_newfunc (he, ht, k)
3322 struct hash_entry *he;
3323 struct hash_table *ht;
3324 hash_table_key k ATTRIBUTE_UNUSED;
3326 struct insns_for_mem_entry *ifmhe;
3327 if (he)
3328 return he;
3330 ifmhe = ((struct insns_for_mem_entry *)
3331 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3332 ifmhe->insns = NULL_RTX;
3334 return &ifmhe->he;
3337 /* Return a hash value for K, a REG. */
3339 static unsigned long
3340 insns_for_mem_hash (k)
3341 hash_table_key k;
3343 /* K is really an RTX. Just use the address as the hash value. */
3344 return (unsigned long) k;
3347 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3349 static boolean
3350 insns_for_mem_comp (k1, k2)
3351 hash_table_key k1;
3352 hash_table_key k2;
3354 return k1 == k2;
3357 struct insns_for_mem_walk_info {
3358 /* The hash table that we are using to record which INSNs use which
3359 MEMs. */
3360 struct hash_table *ht;
3362 /* The INSN we are currently processing. */
3363 rtx insn;
3365 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3366 to find the insns that use the REGs in the ADDRESSOFs. */
3367 int pass;
3370 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3371 that might be used in an ADDRESSOF expression, record this INSN in
3372 the hash table given by DATA (which is really a pointer to an
3373 insns_for_mem_walk_info structure). */
3375 static int
3376 insns_for_mem_walk (r, data)
3377 rtx *r;
3378 void *data;
3380 struct insns_for_mem_walk_info *ifmwi
3381 = (struct insns_for_mem_walk_info *) data;
3383 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3384 && GET_CODE (XEXP (*r, 0)) == REG)
3385 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3386 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3388 /* Look up this REG in the hashtable (it was entered during pass 0). */
3389 struct insns_for_mem_entry *ifme
3390 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3391 *r,
3392 /*create=*/0,
3393 /*copy=*/0);
3395 /* If we have not already recorded this INSN, do so now. Since
3396 we process the INSNs in order, we know that if we have
3397 recorded it, it must be at the front of the list. */
3398 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3400 /* We do the allocation on the same obstack as is used for
3401 the hash table since this memory will not be used once
3402 the hash table is deallocated. */
3403 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3404 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3405 ifme->insns);
3406 pop_obstacks ();
3410 return 0;
3413 /* Walk the INSNS until we reach LAST_INSN, recording which INSNs use
3414 which REGs in HT. */
3416 static void
3417 compute_insns_for_mem (insns, last_insn, ht)
3418 rtx insns;
3419 rtx last_insn;
3420 struct hash_table *ht;
3422 rtx insn;
3423 struct insns_for_mem_walk_info ifmwi;
3424 ifmwi.ht = ht;
3426 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3427 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3428 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3430 ifmwi.insn = insn;
3431 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3435 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3436 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3437 stack. */
3439 void
3440 purge_addressof (insns)
3441 rtx insns;
3443 rtx insn;
3444 struct hash_table ht;
3446 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3447 requires a fixup pass over the instruction stream to correct
3448 INSNs that depended on the REG being a REG, and not a MEM. But
3449 these fixup passes are slow. Furthermore, most MEMs are not
3450 mentioned in very many instructions. So, we speed up the process
3451 by pre-calculating which REGs occur in which INSNs; that allows
3452 us to perform the fixup passes much more quickly. */
3453 hash_table_init (&ht,
3454 insns_for_mem_newfunc,
3455 insns_for_mem_hash,
3456 insns_for_mem_comp);
3457 compute_insns_for_mem (insns, NULL_RTX, &ht);
3459 for (insn = insns; insn; insn = NEXT_INSN (insn))
3460 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3461 || GET_CODE (insn) == CALL_INSN)
3463 purge_addressof_1 (&PATTERN (insn), insn,
3464 asm_noperands (PATTERN (insn)) > 0, 0, &ht);
3465 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht);
3468 /* Clean up. */
3469 hash_table_free (&ht);
3470 purge_bitfield_addressof_replacements = 0;
3471 purge_addressof_replacements = 0;
3474 /* Pass through the INSNS of function FNDECL and convert virtual register
3475 references to hard register references. */
3477 void
3478 instantiate_virtual_regs (fndecl, insns)
3479 tree fndecl;
3480 rtx insns;
3482 rtx insn;
3483 int i;
3485 /* Compute the offsets to use for this function. */
3486 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3487 var_offset = STARTING_FRAME_OFFSET;
3488 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3489 out_arg_offset = STACK_POINTER_OFFSET;
3490 cfa_offset = ARG_POINTER_CFA_OFFSET;
3492 /* Scan all variables and parameters of this function. For each that is
3493 in memory, instantiate all virtual registers if the result is a valid
3494 address. If not, we do it later. That will handle most uses of virtual
3495 regs on many machines. */
3496 instantiate_decls (fndecl, 1);
3498 /* Initialize recognition, indicating that volatile is OK. */
3499 init_recog ();
3501 /* Scan through all the insns, instantiating every virtual register still
3502 present. */
3503 for (insn = insns; insn; insn = NEXT_INSN (insn))
3504 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3505 || GET_CODE (insn) == CALL_INSN)
3507 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3508 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3511 /* Instantiate the stack slots for the parm registers, for later use in
3512 addressof elimination. */
3513 for (i = 0; i < max_parm_reg; ++i)
3514 if (parm_reg_stack_loc[i])
3515 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3517 /* Now instantiate the remaining register equivalences for debugging info.
3518 These will not be valid addresses. */
3519 instantiate_decls (fndecl, 0);
3521 /* Indicate that, from now on, assign_stack_local should use
3522 frame_pointer_rtx. */
3523 virtuals_instantiated = 1;
3526 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3527 all virtual registers in their DECL_RTL's.
3529 If VALID_ONLY, do this only if the resulting address is still valid.
3530 Otherwise, always do it. */
3532 static void
3533 instantiate_decls (fndecl, valid_only)
3534 tree fndecl;
3535 int valid_only;
3537 tree decl;
3539 if (DECL_SAVED_INSNS (fndecl))
3540 /* When compiling an inline function, the obstack used for
3541 rtl allocation is the maybepermanent_obstack. Calling
3542 `resume_temporary_allocation' switches us back to that
3543 obstack while we process this function's parameters. */
3544 resume_temporary_allocation ();
3546 /* Process all parameters of the function. */
3547 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3549 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3551 instantiate_decl (DECL_RTL (decl), size, valid_only);
3553 /* If the parameter was promoted, then the incoming RTL mode may be
3554 larger than the declared type size. We must use the larger of
3555 the two sizes. */
3556 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3557 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3560 /* Now process all variables defined in the function or its subblocks. */
3561 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3563 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3565 /* Save all rtl allocated for this function by raising the
3566 high-water mark on the maybepermanent_obstack. */
3567 preserve_data ();
3568 /* All further rtl allocation is now done in the current_obstack. */
3569 rtl_in_current_obstack ();
3573 /* Subroutine of instantiate_decls: Process all decls in the given
3574 BLOCK node and all its subblocks. */
3576 static void
3577 instantiate_decls_1 (let, valid_only)
3578 tree let;
3579 int valid_only;
3581 tree t;
3583 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3584 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3585 valid_only);
3587 /* Process all subblocks. */
3588 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3589 instantiate_decls_1 (t, valid_only);
3592 /* Subroutine of the preceding procedures: Given RTL representing a
3593 decl and the size of the object, do any instantiation required.
3595 If VALID_ONLY is non-zero, it means that the RTL should only be
3596 changed if the new address is valid. */
3598 static void
3599 instantiate_decl (x, size, valid_only)
3600 rtx x;
3601 int size;
3602 int valid_only;
3604 enum machine_mode mode;
3605 rtx addr;
3607 /* If this is not a MEM, no need to do anything. Similarly if the
3608 address is a constant or a register that is not a virtual register. */
3610 if (x == 0 || GET_CODE (x) != MEM)
3611 return;
3613 addr = XEXP (x, 0);
3614 if (CONSTANT_P (addr)
3615 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3616 || (GET_CODE (addr) == REG
3617 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3618 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3619 return;
3621 /* If we should only do this if the address is valid, copy the address.
3622 We need to do this so we can undo any changes that might make the
3623 address invalid. This copy is unfortunate, but probably can't be
3624 avoided. */
3626 if (valid_only)
3627 addr = copy_rtx (addr);
3629 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3631 if (valid_only)
3633 /* Now verify that the resulting address is valid for every integer or
3634 floating-point mode up to and including SIZE bytes long. We do this
3635 since the object might be accessed in any mode and frame addresses
3636 are shared. */
3638 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3639 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3640 mode = GET_MODE_WIDER_MODE (mode))
3641 if (! memory_address_p (mode, addr))
3642 return;
3644 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3645 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3646 mode = GET_MODE_WIDER_MODE (mode))
3647 if (! memory_address_p (mode, addr))
3648 return;
3651 /* Put back the address now that we have updated it and we either know
3652 it is valid or we don't care whether it is valid. */
3654 XEXP (x, 0) = addr;
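/* Reduced sketch of the validity scan above, since the double loop is
   easy to misread: the address must be usable at every access width up
   to the object's size, because frame addresses are shared between
   accesses of different modes.  is_valid is a hypothetical stand-in for
   memory_address_p, and the power-of-two size walk stands in for the
   GET_MODE_WIDER_MODE chain.  */
#if 0
static int
valid_at_every_size (addr, size, is_valid)
     char *addr;
     int size;
     int (*is_valid) ();
{
  int width;

  for (width = 1; width <= size; width <<= 1)
    if (! (*is_valid) (width, addr))
      return 0;
  return 1;
}
#endif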
3657 /* Given a pointer to a piece of rtx and an optional pointer to the
3658 containing object, instantiate any virtual registers present in it.
3660 If EXTRA_INSNS, we always do the replacement and generate
3661 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3662 is not valid.
3664 Return 1 if we either had nothing to do or were able to do the
3665 needed replacement. Return 0 otherwise; we only return zero if
3666 EXTRA_INSNS is zero.
3668 We first try some simple transformations to avoid the creation of extra
3669 pseudos. */
3671 static int
3672 instantiate_virtual_regs_1 (loc, object, extra_insns)
3673 rtx *loc;
3674 rtx object;
3675 int extra_insns;
3677 rtx x;
3678 RTX_CODE code;
3679 rtx new = 0;
3680 HOST_WIDE_INT offset = 0;
3681 rtx temp;
3682 rtx seq;
3683 int i, j;
3684 char *fmt;
3686 /* Re-start here to avoid recursion in common cases. */
3687 restart:
3689 x = *loc;
3690 if (x == 0)
3691 return 1;
3693 code = GET_CODE (x);
3695 /* Check for some special cases. */
3696 switch (code)
3698 case CONST_INT:
3699 case CONST_DOUBLE:
3700 case CONST:
3701 case SYMBOL_REF:
3702 case CODE_LABEL:
3703 case PC:
3704 case CC0:
3705 case ASM_INPUT:
3706 case ADDR_VEC:
3707 case ADDR_DIFF_VEC:
3708 case RETURN:
3709 return 1;
3711 case SET:
3712 /* We are allowed to set the virtual registers. This means that
3713 the actual register should receive the source minus the
3714 appropriate offset. This is used, for example, in the handling
3715 of non-local gotos. */
3716 if (SET_DEST (x) == virtual_incoming_args_rtx)
3717 new = arg_pointer_rtx, offset = - in_arg_offset;
3718 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3719 new = frame_pointer_rtx, offset = - var_offset;
3720 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3721 new = stack_pointer_rtx, offset = - dynamic_offset;
3722 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3723 new = stack_pointer_rtx, offset = - out_arg_offset;
3724 else if (SET_DEST (x) == virtual_cfa_rtx)
3725 new = arg_pointer_rtx, offset = - cfa_offset;
3727 if (new)
3729 /* The only valid sources here are PLUS or REG. Just do
3730 the simplest possible thing to handle them. */
3731 if (GET_CODE (SET_SRC (x)) != REG
3732 && GET_CODE (SET_SRC (x)) != PLUS)
3733 abort ();
3735 start_sequence ();
3736 if (GET_CODE (SET_SRC (x)) != REG)
3737 temp = force_operand (SET_SRC (x), NULL_RTX);
3738 else
3739 temp = SET_SRC (x);
3740 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3741 seq = get_insns ();
3742 end_sequence ();
3744 emit_insns_before (seq, object);
3745 SET_DEST (x) = new;
3747 if (! validate_change (object, &SET_SRC (x), temp, 0)
3748 || ! extra_insns)
3749 abort ();
3751 return 1;
3754 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3755 loc = &SET_SRC (x);
3756 goto restart;
3758 case PLUS:
3759 /* Handle special case of virtual register plus constant. */
3760 if (CONSTANT_P (XEXP (x, 1)))
3762 rtx old, new_offset;
3764 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3765 if (GET_CODE (XEXP (x, 0)) == PLUS)
3767 rtx inner = XEXP (XEXP (x, 0), 0);
3769 if (inner == virtual_incoming_args_rtx)
3770 new = arg_pointer_rtx, offset = in_arg_offset;
3771 else if (inner == virtual_stack_vars_rtx)
3772 new = frame_pointer_rtx, offset = var_offset;
3773 else if (inner == virtual_stack_dynamic_rtx)
3774 new = stack_pointer_rtx, offset = dynamic_offset;
3775 else if (inner == virtual_outgoing_args_rtx)
3776 new = stack_pointer_rtx, offset = out_arg_offset;
3777 else if (inner == virtual_cfa_rtx)
3778 new = arg_pointer_rtx, offset = cfa_offset;
3779 else
3781 loc = &XEXP (x, 0);
3782 goto restart;
3785 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3786 extra_insns);
3787 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3790 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3791 new = arg_pointer_rtx, offset = in_arg_offset;
3792 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3793 new = frame_pointer_rtx, offset = var_offset;
3794 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3795 new = stack_pointer_rtx, offset = dynamic_offset;
3796 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3797 new = stack_pointer_rtx, offset = out_arg_offset;
3798 else if (XEXP (x, 0) == virtual_cfa_rtx)
3799 new = arg_pointer_rtx, offset = cfa_offset;
3800 else
3802 /* We know the second operand is a constant. Unless the
3803 first operand is a REG (which has already been checked),
3804 it needs to be checked. */
3805 if (GET_CODE (XEXP (x, 0)) != REG)
3807 loc = &XEXP (x, 0);
3808 goto restart;
3810 return 1;
3813 new_offset = plus_constant (XEXP (x, 1), offset);
3815 /* If the new constant is zero, try to replace the sum with just
3816 the register. */
3817 if (new_offset == const0_rtx
3818 && validate_change (object, loc, new, 0))
3819 return 1;
3821 /* Next try to replace the register and new offset.
3822 There are two changes to validate here and we can't assume that,
3823 when the old offset equals the new one, just changing the register
3824 will yield a valid insn. In the interests of a little efficiency,
3825 however, we only call validate_change once (we don't queue up the
3826 changes and then call apply_change_group). */
3828 old = XEXP (x, 0);
3829 if (offset == 0
3830 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3831 : (XEXP (x, 0) = new,
3832 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3834 if (! extra_insns)
3836 XEXP (x, 0) = old;
3837 return 0;
3840 /* Otherwise copy the new constant into a register and replace
3841 the constant with that register. */
3842 temp = gen_reg_rtx (Pmode);
3843 XEXP (x, 0) = new;
3844 if (validate_change (object, &XEXP (x, 1), temp, 0))
3845 emit_insn_before (gen_move_insn (temp, new_offset), object);
3846 else
3848 /* If that didn't work, replace this expression with a
3849 register containing the sum. */
3851 XEXP (x, 0) = old;
3852 new = gen_rtx_PLUS (Pmode, new, new_offset);
3854 start_sequence ();
3855 temp = force_operand (new, NULL_RTX);
3856 seq = get_insns ();
3857 end_sequence ();
3859 emit_insns_before (seq, object);
3860 if (! validate_change (object, loc, temp, 0)
3861 && ! validate_replace_rtx (x, temp, object))
3862 abort ();
3866 return 1;
3869 /* Fall through to generic two-operand expression case. */
3870 case EXPR_LIST:
3871 case CALL:
3872 case COMPARE:
3873 case MINUS:
3874 case MULT:
3875 case DIV: case UDIV:
3876 case MOD: case UMOD:
3877 case AND: case IOR: case XOR:
3878 case ROTATERT: case ROTATE:
3879 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3880 case NE: case EQ:
3881 case GE: case GT: case GEU: case GTU:
3882 case LE: case LT: case LEU: case LTU:
3883 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3884 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3885 loc = &XEXP (x, 0);
3886 goto restart;
3888 case MEM:
3889 /* Most cases of MEM that convert to valid addresses have already been
3890 handled by our scan of decls. The only special handling we
3891 need here is to make a copy of the rtx to ensure it isn't being
3892 shared if we have to change it to a pseudo.
3894 If the rtx is a simple reference to an address via a virtual register,
3895 it can potentially be shared. In such cases, first try to make it
3896 a valid address, which can also be shared. Otherwise, copy it and
3897 proceed normally.
3899 First check for common cases that need no processing. These are
3900 usually due to instantiation already being done on a previous instance
3901 of a shared rtx. */
3903 temp = XEXP (x, 0);
3904 if (CONSTANT_ADDRESS_P (temp)
3905 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3906 || temp == arg_pointer_rtx
3907 #endif
3908 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3909 || temp == hard_frame_pointer_rtx
3910 #endif
3911 || temp == frame_pointer_rtx)
3912 return 1;
3914 if (GET_CODE (temp) == PLUS
3915 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3916 && (XEXP (temp, 0) == frame_pointer_rtx
3917 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3918 || XEXP (temp, 0) == hard_frame_pointer_rtx
3919 #endif
3920 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3921 || XEXP (temp, 0) == arg_pointer_rtx
3922 #endif
3924 return 1;
3926 if (temp == virtual_stack_vars_rtx
3927 || temp == virtual_incoming_args_rtx
3928 || (GET_CODE (temp) == PLUS
3929 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3930 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3931 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3933 /* This MEM may be shared. If the substitution can be done without
3934 the need to generate new pseudos, we want to do it in place
3935 so all copies of the shared rtx benefit. The call below will
3936 only make substitutions if the resulting address is still
3937 valid.
3939 Note that we cannot pass X as the object in the recursive call
3940 since the insn being processed may not allow all valid
3941 addresses. However, if we were not passed an object, we can
3942 only modify X without copying it if X will have a valid
3943 address.
3945 ??? Also note that this can still lose if OBJECT is an insn that
3946 has fewer restrictions on an address than some other insn.
3947 In that case, we will modify the shared address. This case
3948 doesn't seem very likely, though. One case where this could
3949 happen is in the case of a USE or CLOBBER reference, but we
3950 take care of that below. */
3952 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3953 object ? object : x, 0))
3954 return 1;
3956 /* Otherwise make a copy and process that copy. We copy the entire
3957 RTL expression since it might be a PLUS which could also be
3958 shared. */
3959 *loc = x = copy_rtx (x);
3962 /* Fall through to generic unary operation case. */
3963 case SUBREG:
3964 case STRICT_LOW_PART:
3965 case NEG: case NOT:
3966 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3967 case SIGN_EXTEND: case ZERO_EXTEND:
3968 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3969 case FLOAT: case FIX:
3970 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3971 case ABS:
3972 case SQRT:
3973 case FFS:
3974 /* These cases either have just one operand or we know that we need not
3975 check the rest of the operands. */
3976 loc = &XEXP (x, 0);
3977 goto restart;
3979 case USE:
3980 case CLOBBER:
3981 /* If the operand is a MEM, see if the change results in a valid MEM. If
3982 not, go ahead and make the invalid change, but do it to a copy. For a REG,
3983 just make the recursive call, since there's no chance of a problem. */
3985 if ((GET_CODE (XEXP (x, 0)) == MEM
3986 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3988 || (GET_CODE (XEXP (x, 0)) == REG
3989 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3990 return 1;
3992 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3993 loc = &XEXP (x, 0);
3994 goto restart;
3996 case REG:
3997 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3998 in front of this insn and substitute the temporary. */
3999 if (x == virtual_incoming_args_rtx)
4000 new = arg_pointer_rtx, offset = in_arg_offset;
4001 else if (x == virtual_stack_vars_rtx)
4002 new = frame_pointer_rtx, offset = var_offset;
4003 else if (x == virtual_stack_dynamic_rtx)
4004 new = stack_pointer_rtx, offset = dynamic_offset;
4005 else if (x == virtual_outgoing_args_rtx)
4006 new = stack_pointer_rtx, offset = out_arg_offset;
4007 else if (x == virtual_cfa_rtx)
4008 new = arg_pointer_rtx, offset = cfa_offset;
4010 if (new)
4012 temp = plus_constant (new, offset);
4013 if (!validate_change (object, loc, temp, 0))
4015 if (! extra_insns)
4016 return 0;
4018 start_sequence ();
4019 temp = force_operand (temp, NULL_RTX);
4020 seq = get_insns ();
4021 end_sequence ();
4023 emit_insns_before (seq, object);
4024 if (! validate_change (object, loc, temp, 0)
4025 && ! validate_replace_rtx (x, temp, object))
4026 abort ();
4030 return 1;
4032 case ADDRESSOF:
4033 if (GET_CODE (XEXP (x, 0)) == REG)
4034 return 1;
4036 else if (GET_CODE (XEXP (x, 0)) == MEM)
4038 /* If we have a (addressof (mem ..)), do any instantiation inside
4039 since we know we'll be making the inside valid when we finally
4040 remove the ADDRESSOF. */
4041 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4042 return 1;
4044 break;
4046 default:
4047 break;
4050 /* Scan all subexpressions. */
4051 fmt = GET_RTX_FORMAT (code);
4052 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4053 if (*fmt == 'e')
4055 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4056 return 0;
4058 else if (*fmt == 'E')
4059 for (j = 0; j < XVECLEN (x, i); j++)
4060 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4061 extra_insns))
4062 return 0;
4064 return 1;
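/* Two techniques from the function above, restated: the `restart' label
   with `loc = &XEXP (x, 0); goto restart' is manual tail-recursion
   elimination for the common one-operand path, and the closing loop is
   a format-driven walk over subexpressions.  Below is a self-contained
   sketch of the latter with hypothetical toy names: 'e' marks a
   subexpression operand and 'E' a vector operand, mirroring
   GET_RTX_FORMAT; for simplicity a toy node carries at most one
   vector.  */
#if 0
struct toy_rtx
{
  char *fmt;			/* e.g. "ee" or "E" */
  struct toy_rtx *op[4];	/* 'e' operands, by position */
  struct toy_rtx **vec;		/* the single 'E' operand, if any */
  int veclen;
};

static int
walk_toy_rtx (x, visit)
     struct toy_rtx *x;
     int (*visit) ();
{
  char *fmt;
  int i, j;

  if (x == 0)
    return 1;
  if (! (*visit) (x))
    return 0;

  for (fmt = x->fmt, i = 0; *fmt; fmt++, i++)
    if (*fmt == 'e')
      {
	if (! walk_toy_rtx (x->op[i], visit))
	  return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < x->veclen; j++)
	if (! walk_toy_rtx (x->vec[j], visit))
	  return 0;

  return 1;
}
#endif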
4067 /* Optimization: assuming this function does not receive nonlocal gotos,
4068 delete the handlers for such, as well as the insns to establish
4069 and disestablish them. */
4071 static void
4072 delete_handlers ()
4074 rtx insn;
4075 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4077 /* Delete the handler by turning off the flag that would
4078 prevent jump_optimize from deleting it.
4079 Also permit deletion of the nonlocal labels themselves
4080 if nothing local refers to them. */
4081 if (GET_CODE (insn) == CODE_LABEL)
4083 tree t, last_t;
4085 LABEL_PRESERVE_P (insn) = 0;
4087 /* Remove it from the nonlocal_label list, to avoid confusing
4088 flow. */
4089 for (t = nonlocal_labels, last_t = 0; t;
4090 last_t = t, t = TREE_CHAIN (t))
4091 if (DECL_RTL (TREE_VALUE (t)) == insn)
4092 break;
4093 if (t)
4095 if (! last_t)
4096 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4097 else
4098 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4101 if (GET_CODE (insn) == INSN)
4103 int can_delete = 0;
4104 rtx t;
4105 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4106 if (reg_mentioned_p (t, PATTERN (insn)))
4108 can_delete = 1;
4109 break;
4111 if (can_delete
4112 || (nonlocal_goto_stack_level != 0
4113 && reg_mentioned_p (nonlocal_goto_stack_level,
4114 PATTERN (insn))))
4115 delete_insn (insn);
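/* The label removal above is the classic trailing-pointer unlink for a
   singly linked list; TREE_CHAIN plays the role of `next'.  A
   self-contained sketch with a hypothetical node type: */
#if 0
struct toy_node
{
  int value;
  struct toy_node *next;
};

static struct toy_node *
toy_unlink (head, value)
     struct toy_node *head;
     int value;
{
  struct toy_node *t, *last_t = 0;

  for (t = head; t; last_t = t, t = t->next)
    if (t->value == value)
      break;
  if (t)
    {
      if (! last_t)
	head = t->next;		/* unlink the head node */
      else
	last_t->next = t->next;	/* splice around T */
    }
  return head;
}
#endif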
4120 /* Output a USE for any register use in RTL.
4121 This is used with -noreg to mark the extent of lifespan
4122 of any registers used in a user-visible variable's DECL_RTL. */
4124 void
4125 use_variable (rtl)
4126 rtx rtl;
4128 if (GET_CODE (rtl) == REG)
4129 /* This is a register variable. */
4130 emit_insn (gen_rtx_USE (VOIDmode, rtl));
4131 else if (GET_CODE (rtl) == MEM
4132 && GET_CODE (XEXP (rtl, 0)) == REG
4133 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4134 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4135 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4136 /* This is a variable-sized structure. */
4137 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
4140 /* Like use_variable except that it outputs the USEs after INSN
4141 instead of at the end of the insn-chain. */
4143 void
4144 use_variable_after (rtl, insn)
4145 rtx rtl, insn;
4147 if (GET_CODE (rtl) == REG)
4148 /* This is a register variable. */
4149 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
4150 else if (GET_CODE (rtl) == MEM
4151 && GET_CODE (XEXP (rtl, 0)) == REG
4152 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4153 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4154 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4155 /* This is a variable-sized structure. */
4156 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
4160 max_parm_reg_num ()
4162 return max_parm_reg;
4165 /* Return the first insn following those generated by `assign_parms'. */
4168 get_first_nonparm_insn ()
4170 if (last_parm_insn)
4171 return NEXT_INSN (last_parm_insn);
4172 return get_insns ();
4175 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4176 Crash if there is none. */
4179 get_first_block_beg ()
4181 register rtx searcher;
4182 register rtx insn = get_first_nonparm_insn ();
4184 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4185 if (GET_CODE (searcher) == NOTE
4186 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4187 return searcher;
4189 abort (); /* Invalid call to this function. (See comments above.) */
4190 return NULL_RTX;
4193 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4194 This means a type for which function calls must pass an address to the
4195 function or get an address back from the function.
4196 EXP may be a type node or an expression (whose type is tested). */
4199 aggregate_value_p (exp)
4200 tree exp;
4202 int i, regno, nregs;
4203 rtx reg;
4204 tree type;
4205 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
4206 type = exp;
4207 else
4208 type = TREE_TYPE (exp);
4210 if (RETURN_IN_MEMORY (type))
4211 return 1;
4212 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4213 and thus can't be returned in registers. */
4214 if (TREE_ADDRESSABLE (type))
4215 return 1;
4216 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4217 return 1;
4218 /* Make sure we have suitable call-clobbered regs to return
4219 the value in; if not, we must return it in memory. */
4220 reg = hard_function_value (type, 0);
4222 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4223 it is OK. */
4224 if (GET_CODE (reg) != REG)
4225 return 0;
4227 regno = REGNO (reg);
4228 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4229 for (i = 0; i < nregs; i++)
4230 if (! call_used_regs[regno + i])
4231 return 1;
4232 return 0;
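/* Reduction of the final check above to a toy predicate, as a sketch:
   a value occupying NREGS hard registers starting at REGNO must be
   returned in memory if any of those registers is not call-clobbered.
   CALL_USED is a hypothetical stand-in for the real call_used_regs
   table.  */
#if 0
static int
toy_needs_memory_return (regno, nregs, call_used)
     int regno, nregs;
     char *call_used;
{
  int i;

  for (i = 0; i < nregs; i++)
    if (! call_used[regno + i])
      return 1;
  return 0;
}
#endif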
4235 /* Assign RTL expressions to the function's parameters.
4236 This may involve copying them into registers and using
4237 those registers as the RTL for them.
4239 If SECOND_TIME is non-zero it means that this function is being
4240 called a second time. This is done by integrate.c when a function's
4241 compilation is deferred. We need to come back here in case the
4242 FUNCTION_ARG macro computes items needed for the rest of the compilation
4243 (such as changing which registers are fixed or caller-saved). But suppress
4244 writing any insns or setting DECL_RTL of anything in this case. */
4246 void
4247 assign_parms (fndecl, second_time)
4248 tree fndecl;
4249 int second_time;
4251 register tree parm;
4252 register rtx entry_parm = 0;
4253 register rtx stack_parm = 0;
4254 CUMULATIVE_ARGS args_so_far;
4255 enum machine_mode promoted_mode, passed_mode;
4256 enum machine_mode nominal_mode, promoted_nominal_mode;
4257 int unsignedp;
4258 /* Total space needed so far for args on the stack,
4259 given as a constant and a tree-expression. */
4260 struct args_size stack_args_size;
4261 tree fntype = TREE_TYPE (fndecl);
4262 tree fnargs = DECL_ARGUMENTS (fndecl);
4263 /* This is used for the arg pointer when referring to stack args. */
4264 rtx internal_arg_pointer;
4265 /* This is a dummy PARM_DECL that we used for the function result if
4266 the function returns a structure. */
4267 tree function_result_decl = 0;
4268 #ifdef SETUP_INCOMING_VARARGS
4269 int varargs_setup = 0;
4270 #endif
4271 rtx conversion_insns = 0;
4273 /* Nonzero if the last arg is named `__builtin_va_alist',
4274 which is used on some machines for old-fashioned non-ANSI varargs.h;
4275 this should be stuck onto the stack as if it had arrived there. */
4276 int hide_last_arg
4277 = (current_function_varargs
4278 && fnargs
4279 && (parm = tree_last (fnargs)) != 0
4280 && DECL_NAME (parm)
4281 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4282 "__builtin_va_alist")));
4284 /* Nonzero if function takes extra anonymous args.
4285 This means the last named arg must be on the stack
4286 right before the anonymous ones. */
4287 int stdarg
4288 = (TYPE_ARG_TYPES (fntype) != 0
4289 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4290 != void_type_node));
4292 current_function_stdarg = stdarg;
4294 /* If the reg that the virtual arg pointer will be translated into is
4295 not a fixed reg or is the stack pointer, make a copy of the virtual
4296 arg pointer, and address parms via the copy. The frame pointer is
4297 considered fixed even though it is not marked as such.
4299 The second time through, simply use ap to avoid generating rtx. */
4301 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4302 || ! (fixed_regs[ARG_POINTER_REGNUM]
4303 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
4304 && ! second_time)
4305 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4306 else
4307 internal_arg_pointer = virtual_incoming_args_rtx;
4308 current_function_internal_arg_pointer = internal_arg_pointer;
4310 stack_args_size.constant = 0;
4311 stack_args_size.var = 0;
4313 /* If struct value address is treated as the first argument, make it so. */
4314 if (aggregate_value_p (DECL_RESULT (fndecl))
4315 && ! current_function_returns_pcc_struct
4316 && struct_value_incoming_rtx == 0)
4318 tree type = build_pointer_type (TREE_TYPE (fntype));
4320 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4322 DECL_ARG_TYPE (function_result_decl) = type;
4323 TREE_CHAIN (function_result_decl) = fnargs;
4324 fnargs = function_result_decl;
4327 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4328 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4329 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
4331 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4332 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4333 #else
4334 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4335 #endif
4337 /* We haven't yet found an argument that we must push and pretend the
4338 caller did. */
4339 current_function_pretend_args_size = 0;
4341 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4343 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4344 struct args_size stack_offset;
4345 struct args_size arg_size;
4346 int passed_pointer = 0;
4347 int did_conversion = 0;
4348 tree passed_type = DECL_ARG_TYPE (parm);
4349 tree nominal_type = TREE_TYPE (parm);
4350 int pretend_named;
4352 /* Set LAST_NAMED if this is last named arg before some
4353 anonymous args. */
4354 int last_named = ((TREE_CHAIN (parm) == 0
4355 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4356 && (stdarg || current_function_varargs));
4357 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4358 most machines, if this is a varargs/stdarg function, then we treat
4359 the last named arg as if it were anonymous too. */
4360 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4362 if (TREE_TYPE (parm) == error_mark_node
4363 /* This can happen after weird syntax errors
4364 or if an enum type is defined among the parms. */
4365 || TREE_CODE (parm) != PARM_DECL
4366 || passed_type == NULL)
4368 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4369 = gen_rtx_MEM (BLKmode, const0_rtx);
4370 TREE_USED (parm) = 1;
4371 continue;
4374 /* For a varargs.h function, save info about regs and stack space
4375 used by the individual args, not including the va_alist arg. */
4376 if (hide_last_arg && last_named)
4377 current_function_args_info = args_so_far;
4379 /* Find mode of arg as it is passed, and mode of arg
4380 as it should be during execution of this function. */
4381 passed_mode = TYPE_MODE (passed_type);
4382 nominal_mode = TYPE_MODE (nominal_type);
4384 /* If the parm's mode is VOID, its value doesn't matter,
4385 so avoid the usual things like emit_move_insn that could crash. */
4386 if (nominal_mode == VOIDmode)
4388 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4389 continue;
4392 /* If the parm is to be passed as a transparent union, use the
4393 type of the first field for the tests below. We have already
4394 verified that the modes are the same. */
4395 if (DECL_TRANSPARENT_UNION (parm)
4396 || TYPE_TRANSPARENT_UNION (passed_type))
4397 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4399 /* See if this arg was passed by invisible reference. It is if
4400 it is an object whose size depends on the contents of the
4401 object itself or if the machine requires these objects be passed
4402 that way. */
4404 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4405 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4406 || TREE_ADDRESSABLE (passed_type)
4407 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4408 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4409 passed_type, named_arg)
4410 #endif
4413 passed_type = nominal_type = build_pointer_type (passed_type);
4414 passed_pointer = 1;
4415 passed_mode = nominal_mode = Pmode;
4418 promoted_mode = passed_mode;
4420 #ifdef PROMOTE_FUNCTION_ARGS
4421 /* Compute the mode in which the arg is actually extended to. */
4422 unsignedp = TREE_UNSIGNED (passed_type);
4423 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4424 #endif
4426 /* Let machine desc say which reg (if any) the parm arrives in.
4427 0 means it arrives on the stack. */
4428 #ifdef FUNCTION_INCOMING_ARG
4429 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4430 passed_type, named_arg);
4431 #else
4432 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4433 passed_type, named_arg);
4434 #endif
4436 if (entry_parm == 0)
4437 promoted_mode = passed_mode;
4439 #ifdef SETUP_INCOMING_VARARGS
4440 /* If this is the last named parameter, do any required setup for
4441 varargs or stdargs. We need to know about the case of this being an
4442 addressable type, in which case we skip the registers it
4443 would have arrived in.
4445 For stdargs, LAST_NAMED will be set for two parameters, the one that
4446 is actually the last named, and the dummy parameter. We only
4447 want to do this action once.
4449 Also, indicate when RTL generation is to be suppressed. */
4450 if (last_named && !varargs_setup)
4452 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4453 current_function_pretend_args_size,
4454 second_time);
4455 varargs_setup = 1;
4457 #endif
4459 /* Determine parm's home in the stack,
4460 in case it arrives in the stack or we should pretend it did.
4462 Compute the stack position and rtx where the argument arrives
4463 and its size.
4465 There is one complexity here: If this was a parameter that would
4466 have been passed in registers, but wasn't (only because it is
4467 __builtin_va_alist), we want locate_and_pad_parm to treat it as if
4468 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4469 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4470 0 as it was the previous time. */
4472 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4473 locate_and_pad_parm (promoted_mode, passed_type,
4474 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4476 #else
4477 #ifdef FUNCTION_INCOMING_ARG
4478 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4479 passed_type,
4480 pretend_named) != 0,
4481 #else
4482 FUNCTION_ARG (args_so_far, promoted_mode,
4483 passed_type,
4484 pretend_named) != 0,
4485 #endif
4486 #endif
4487 fndecl, &stack_args_size, &stack_offset, &arg_size);
4489 if (! second_time)
4491 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4493 if (offset_rtx == const0_rtx)
4494 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4495 else
4496 stack_parm = gen_rtx_MEM (promoted_mode,
4497 gen_rtx_PLUS (Pmode,
4498 internal_arg_pointer,
4499 offset_rtx));
4501 /* If this is a memory ref that contains aggregate components,
4502 mark it as such for cse and loop optimize. Likewise if it
4503 is readonly. */
4504 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4505 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4506 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4509 /* If this parameter was passed both in registers and in the stack,
4510 use the copy on the stack. */
4511 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4512 entry_parm = 0;
4514 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4515 /* If this parm was passed part in regs and part in memory,
4516 pretend it arrived entirely in memory
4517 by pushing the register-part onto the stack.
4519 In the special case of a DImode or DFmode that is split,
4520 we could put it together in a pseudoreg directly,
4521 but for now that's not worth bothering with. */
4523 if (entry_parm)
4525 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4526 passed_type, named_arg);
4528 if (nregs > 0)
4530 current_function_pretend_args_size
4531 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4532 / (PARM_BOUNDARY / BITS_PER_UNIT)
4533 * (PARM_BOUNDARY / BITS_PER_UNIT));
4535 if (! second_time)
4537 /* Handle calls that pass values in multiple non-contiguous
4538 locations. The Irix 6 ABI has examples of this. */
4539 if (GET_CODE (entry_parm) == PARALLEL)
4540 emit_group_store (validize_mem (stack_parm), entry_parm,
4541 int_size_in_bytes (TREE_TYPE (parm)),
4542 (TYPE_ALIGN (TREE_TYPE (parm))
4543 / BITS_PER_UNIT));
4544 else
4545 move_block_from_reg (REGNO (entry_parm),
4546 validize_mem (stack_parm), nregs,
4547 int_size_in_bytes (TREE_TYPE (parm)));
4549 entry_parm = stack_parm;
4552 #endif
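/* Worked example of the pretend-args-size rounding just above, under
   assumed values UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 (an 8-byte
   boundary): three registers hold 12 bytes, which rounds up to 16.
   Note that this divide-then-multiply form, unlike the masking in
   CEIL_ROUND, does not require the boundary to be a power of two.  */
#if 0
static int
round_pretend_size (nregs, units_per_word, boundary_bytes)
     int nregs, units_per_word, boundary_bytes;
{
  /* round_pretend_size (3, 4, 8) == 16.  */
  return ((nregs * units_per_word + boundary_bytes - 1)
	  / boundary_bytes) * boundary_bytes;
}
#endif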
4554 /* If we didn't decide this parm came in a register,
4555 by default it came on the stack. */
4556 if (entry_parm == 0)
4557 entry_parm = stack_parm;
4559 /* Record permanently how this parm was passed. */
4560 if (! second_time)
4561 DECL_INCOMING_RTL (parm) = entry_parm;
4563 /* If there is actually space on the stack for this parm,
4564 count it in stack_args_size; otherwise set stack_parm to 0
4565 to indicate there is no preallocated stack slot for the parm. */
4567 if (entry_parm == stack_parm
4568 || (GET_CODE (entry_parm) == PARALLEL
4569 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4570 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4571 /* On some machines, even if a parm value arrives in a register
4572 there is still an (uninitialized) stack slot allocated for it.
4574 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4575 whether this parameter already has a stack slot allocated,
4576 because an arg block exists only if current_function_args_size
4577 is larger than some threshold, and we haven't calculated that
4578 yet. So, for now, we just assume that stack slots never exist
4579 in this case. */
4580 || REG_PARM_STACK_SPACE (fndecl) > 0
4581 #endif
4584 stack_args_size.constant += arg_size.constant;
4585 if (arg_size.var)
4586 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4588 else
4589 /* No stack slot was pushed for this parm. */
4590 stack_parm = 0;
4592 /* Update info on where next arg arrives in registers. */
4594 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4595 passed_type, named_arg);
4597 /* If this is our second time through, we are done with this parm. */
4598 if (second_time)
4599 continue;
4601 /* If we can't trust the parm stack slot to be aligned enough
4602 for its ultimate type, don't use that slot after entry.
4603 We'll make another stack slot, if we need one. */
4605 int thisparm_boundary
4606 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4608 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4609 stack_parm = 0;
4612 /* If parm was passed in memory, and we need to convert it on entry,
4613 don't store it back in that same slot. */
4614 if (entry_parm != 0
4615 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4616 stack_parm = 0;
4618 #if 0
4619 /* Now adjust STACK_PARM to the mode and precise location
4620 where this parameter should live during execution,
4621 if we discover that it must live in the stack during execution.
4622 To make debuggers happier on big-endian machines, we store
4623 the value in the last bytes of the space available. */
4625 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4626 && stack_parm != 0)
4628 rtx offset_rtx;
4630 if (BYTES_BIG_ENDIAN
4631 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4632 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4633 - GET_MODE_SIZE (nominal_mode));
4635 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4636 if (offset_rtx == const0_rtx)
4637 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4638 else
4639 stack_parm = gen_rtx_MEM (nominal_mode,
4640 gen_rtx_PLUS (Pmode,
4641 internal_arg_pointer,
4642 offset_rtx));
4644 /* If this is a memory ref that contains aggregate components,
4645 mark it as such for cse and loop optimize. */
4646 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4648 #endif /* 0 */
4650 #ifdef STACK_REGS
4651 /* We need this "use" info, because the gcc-register->stack-register
4652 converter in reg-stack.c needs to know which registers are active
4653 at the start of the function call. The actual parameter loading
4654 instructions are not always available by then, since they might
4655 have been optimized away. */
4657 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4658 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4659 #endif
4661 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4662 in the mode in which it arrives.
4663 STACK_PARM is an RTX for a stack slot where the parameter can live
4664 during the function (in case we want to put it there).
4665 STACK_PARM is 0 if no stack slot was pushed for it.
4667 Now output code if necessary to convert ENTRY_PARM to
4668 the type in which this function declares it,
4669 and store that result in an appropriate place,
4670 which may be a pseudo reg, may be STACK_PARM,
4671 or may be a local stack slot if STACK_PARM is 0.
4673 Set DECL_RTL to that place. */
4675 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4677 /* If a BLKmode arrives in registers, copy it to a stack slot.
4678 Handle calls that pass values in multiple non-contiguous
4679 locations. The Irix 6 ABI has examples of this. */
4680 if (GET_CODE (entry_parm) == REG
4681 || GET_CODE (entry_parm) == PARALLEL)
4683 int size_stored
4684 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4685 UNITS_PER_WORD);
4687 /* Note that we will be storing an integral number of words.
4688 So we have to be careful to ensure that we allocate an
4689 integral number of words. We do this below in the
4690 assign_stack_local if space was not allocated in the argument
4691 list. If it was, this will not work if PARM_BOUNDARY is not
4692 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4693 if it becomes a problem. */
4695 if (stack_parm == 0)
4697 stack_parm
4698 = assign_stack_local (GET_MODE (entry_parm),
4699 size_stored, 0);
4701 /* If this is a memory ref that contains aggregate
4702 components, mark it as such for cse and loop optimize. */
4703 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4706 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4707 abort ();
4709 if (TREE_READONLY (parm))
4710 RTX_UNCHANGING_P (stack_parm) = 1;
4712 /* Handle calls that pass values in multiple non-contiguous
4713 locations. The Irix 6 ABI has examples of this. */
4714 if (GET_CODE (entry_parm) == PARALLEL)
4715 emit_group_store (validize_mem (stack_parm), entry_parm,
4716 int_size_in_bytes (TREE_TYPE (parm)),
4717 (TYPE_ALIGN (TREE_TYPE (parm))
4718 / BITS_PER_UNIT));
4719 else
4720 move_block_from_reg (REGNO (entry_parm),
4721 validize_mem (stack_parm),
4722 size_stored / UNITS_PER_WORD,
4723 int_size_in_bytes (TREE_TYPE (parm)));
4725 DECL_RTL (parm) = stack_parm;
4727 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4728 && ! DECL_INLINE (fndecl))
4729 /* layout_decl may set this. */
4730 || TREE_ADDRESSABLE (parm)
4731 || TREE_SIDE_EFFECTS (parm)
4732 /* If -ffloat-store specified, don't put explicit
4733 float variables into registers. */
4734 || (flag_float_store
4735 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4736 /* Always assign pseudo to structure return or item passed
4737 by invisible reference. */
4738 || passed_pointer || parm == function_result_decl)
4740 /* Store the parm in a pseudoregister during the function, but we
4741 may need to do it in a wider mode. */
4743 register rtx parmreg;
4744 int regno, regnoi = 0, regnor = 0;
4746 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4748 promoted_nominal_mode
4749 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4751 parmreg = gen_reg_rtx (promoted_nominal_mode);
4752 mark_user_reg (parmreg);
4754 /* If this was an item that we received a pointer to, set DECL_RTL
4755 appropriately. */
4756 if (passed_pointer)
4758 DECL_RTL (parm)
4759 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4760 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4762 else
4763 DECL_RTL (parm) = parmreg;
4765 /* Copy the value into the register. */
4766 if (nominal_mode != passed_mode
4767 || promoted_nominal_mode != promoted_mode)
4769 int save_tree_used;
4770 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4771 mode, by the caller. We now have to convert it to
4772 NOMINAL_MODE, if different. However, PARMREG may be in
4773 a different mode than NOMINAL_MODE if it is being stored
4774 promoted.
4776 If ENTRY_PARM is a hard register, it might be in a register
4777 not valid for operating in its mode (e.g., an odd-numbered
4778 register for a DFmode). In that case, moves are the only
4779 thing valid, so we can't do a convert from there. This
4780 occurs when the calling sequence allows such misaligned
4781 usage.
4783 In addition, the conversion may involve a call, which could
4784 clobber parameters which haven't been copied to pseudo
4785 registers yet. Therefore, we must first copy the parm to
4786 a pseudo reg here, and save the conversion until after all
4787 parameters have been moved. */
4789 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4791 emit_move_insn (tempreg, validize_mem (entry_parm));
4793 push_to_sequence (conversion_insns);
4794 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4796 /* TREE_USED gets set erroneously during expand_assignment. */
4797 save_tree_used = TREE_USED (parm);
4798 expand_assignment (parm,
4799 make_tree (nominal_type, tempreg), 0, 0);
4800 TREE_USED (parm) = save_tree_used;
4801 conversion_insns = get_insns ();
4802 did_conversion = 1;
4803 end_sequence ();
4805 else
4806 emit_move_insn (parmreg, validize_mem (entry_parm));
4808 /* If we were passed a pointer but the actual value
4809 can safely live in a register, put it in one. */
4810 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4811 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4812 && ! DECL_INLINE (fndecl))
4813 /* layout_decl may set this. */
4814 || TREE_ADDRESSABLE (parm)
4815 || TREE_SIDE_EFFECTS (parm)
4816 /* If -ffloat-store specified, don't put explicit
4817 float variables into registers. */
4818 || (flag_float_store
4819 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4821 /* We can't use nominal_mode, because it will have been set to
4822 Pmode above. We must use the actual mode of the parm. */
4823 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4824 mark_user_reg (parmreg);
4825 emit_move_insn (parmreg, DECL_RTL (parm));
4826 DECL_RTL (parm) = parmreg;
4827 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4828 now the parm. */
4829 stack_parm = 0;
4831 #ifdef FUNCTION_ARG_CALLEE_COPIES
4832 /* If we are passed an arg by reference and it is our responsibility
4833 to make a copy, do it now.
4834 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4835 original argument, so we must recreate them in the call to
4836 FUNCTION_ARG_CALLEE_COPIES. */
4837 /* ??? Later, add code to handle the case where the argument
4838 isn't modified, so that the copy can be skipped. */
4840 else if (passed_pointer
4841 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4842 TYPE_MODE (DECL_ARG_TYPE (parm)),
4843 DECL_ARG_TYPE (parm),
4844 named_arg)
4845 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4847 rtx copy;
4848 tree type = DECL_ARG_TYPE (parm);
4850 /* This sequence may involve a library call perhaps clobbering
4851 registers that haven't been copied to pseudos yet. */
4853 push_to_sequence (conversion_insns);
4855 if (TYPE_SIZE (type) == 0
4856 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4857 /* This is a variable-sized object. */
4858 copy = gen_rtx_MEM (BLKmode,
4859 allocate_dynamic_stack_space
4860 (expr_size (parm), NULL_RTX,
4861 TYPE_ALIGN (type)));
4862 else
4863 copy = assign_stack_temp (TYPE_MODE (type),
4864 int_size_in_bytes (type), 1);
4865 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4866 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4868 store_expr (parm, copy, 0);
4869 emit_move_insn (parmreg, XEXP (copy, 0));
4870 if (current_function_check_memory_usage)
4871 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4872 XEXP (copy, 0), Pmode,
4873 GEN_INT (int_size_in_bytes (type)),
4874 TYPE_MODE (sizetype),
4875 GEN_INT (MEMORY_USE_RW),
4876 TYPE_MODE (integer_type_node));
4877 conversion_insns = get_insns ();
4878 did_conversion = 1;
4879 end_sequence ();
4881 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4883 /* In any case, record the parm's desired stack location
4884 in case we later discover it must live in the stack.
4886 If it is a COMPLEX value, store the stack location for both
4887 halves. */
4889 if (GET_CODE (parmreg) == CONCAT)
4890 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4891 else
4892 regno = REGNO (parmreg);
4894 if (regno >= max_parm_reg)
4896 rtx *new;
4897 int old_max_parm_reg = max_parm_reg;
4899 /* It's slow to expand this one register at a time,
4900 but it's also rare and we need max_parm_reg to be
4901 precisely correct. */
4902 max_parm_reg = regno + 1;
4903 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4904 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4905 old_max_parm_reg * sizeof (rtx));
4906 bzero ((char *) (new + old_max_parm_reg),
4907 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4908 parm_reg_stack_loc = new;
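/* Isolated sketch of the growth step just above: allocate the larger
   vector, copy the old entries, and zero the new tail.  The names are
   hypothetical, and the allocator parameter stands in for savealloc so
   the example stays self-contained.  */
#if 0
static rtx *
grow_parm_reg_table (old, old_len, new_len, alloc)
     rtx *old;
     int old_len, new_len;
     char *(*alloc) ();
{
  rtx *new_tab = (rtx *) (*alloc) (new_len * sizeof (rtx));

  bcopy ((char *) old, (char *) new_tab, old_len * sizeof (rtx));
  bzero ((char *) (new_tab + old_len),
	 (new_len - old_len) * sizeof (rtx));
  return new_tab;
}
#endif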
4911 if (GET_CODE (parmreg) == CONCAT)
4913 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4915 regnor = REGNO (gen_realpart (submode, parmreg));
4916 regnoi = REGNO (gen_imagpart (submode, parmreg));
4918 if (stack_parm != 0)
4920 parm_reg_stack_loc[regnor]
4921 = gen_realpart (submode, stack_parm);
4922 parm_reg_stack_loc[regnoi]
4923 = gen_imagpart (submode, stack_parm);
4925 else
4927 parm_reg_stack_loc[regnor] = 0;
4928 parm_reg_stack_loc[regnoi] = 0;
4931 else
4932 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4934 /* Mark the register as eliminable if we did no conversion
4935 and it was copied from memory at a fixed offset,
4936 and the arg pointer was not copied to a pseudo-reg.
4937 If the arg pointer is a pseudo reg or the offset formed
4938 an invalid address, such memory-equivalences
4939 as we make here would screw up life analysis for it. */
4940 if (nominal_mode == passed_mode
4941 && ! did_conversion
4942 && stack_parm != 0
4943 && GET_CODE (stack_parm) == MEM
4944 && stack_offset.var == 0
4945 && reg_mentioned_p (virtual_incoming_args_rtx,
4946 XEXP (stack_parm, 0)))
4948 rtx linsn = get_last_insn ();
4949 rtx sinsn, set;
4951 /* Mark complex types separately. */
4952 if (GET_CODE (parmreg) == CONCAT)
4953 /* Scan backwards for the set of the real and
4954 imaginary parts. */
4955 for (sinsn = linsn; sinsn != 0;
4956 sinsn = prev_nonnote_insn (sinsn))
4958 set = single_set (sinsn);
4959 if (set != 0
4960 && SET_DEST (set) == regno_reg_rtx [regnoi])
4961 REG_NOTES (sinsn)
4962 = gen_rtx_EXPR_LIST (REG_EQUIV,
4963 parm_reg_stack_loc[regnoi],
4964 REG_NOTES (sinsn));
4965 else if (set != 0
4966 && SET_DEST (set) == regno_reg_rtx [regnor])
4967 REG_NOTES (sinsn)
4968 = gen_rtx_EXPR_LIST (REG_EQUIV,
4969 parm_reg_stack_loc[regnor],
4970 REG_NOTES (sinsn));
4972 else if ((set = single_set (linsn)) != 0
4973 && SET_DEST (set) == parmreg)
4974 REG_NOTES (linsn)
4975 = gen_rtx_EXPR_LIST (REG_EQUIV,
4976 stack_parm, REG_NOTES (linsn));
4979 /* For pointer data type, suggest pointer register. */
4980 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4981 mark_reg_pointer (parmreg,
4982 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4983 / BITS_PER_UNIT));
4985 else
4987 /* Value must be stored in the stack slot STACK_PARM
4988 during function execution. */
4990 if (promoted_mode != nominal_mode)
4992 /* Conversion is required. */
4993 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4995 emit_move_insn (tempreg, validize_mem (entry_parm));
4997 push_to_sequence (conversion_insns);
4998 entry_parm = convert_to_mode (nominal_mode, tempreg,
4999 TREE_UNSIGNED (TREE_TYPE (parm)));
5000 if (stack_parm)
5002 /* ??? This may need a big-endian conversion on sparc64. */
5003 stack_parm = change_address (stack_parm, nominal_mode,
5004 NULL_RTX);
5006 conversion_insns = get_insns ();
5007 did_conversion = 1;
5008 end_sequence ();
5011 if (entry_parm != stack_parm)
5013 if (stack_parm == 0)
5015 stack_parm
5016 = assign_stack_local (GET_MODE (entry_parm),
5017 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5018 /* If this is a memory ref that contains aggregate components,
5019 mark it as such for cse and loop optimize. */
5020 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
5023 if (promoted_mode != nominal_mode)
5025 push_to_sequence (conversion_insns);
5026 emit_move_insn (validize_mem (stack_parm),
5027 validize_mem (entry_parm));
5028 conversion_insns = get_insns ();
5029 end_sequence ();
5031 else
5032 emit_move_insn (validize_mem (stack_parm),
5033 validize_mem (entry_parm));
5035 if (current_function_check_memory_usage)
5037 push_to_sequence (conversion_insns);
5038 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
5039 XEXP (stack_parm, 0), Pmode,
5040 GEN_INT (GET_MODE_SIZE (GET_MODE
5041 (entry_parm))),
5042 TYPE_MODE (sizetype),
5043 GEN_INT (MEMORY_USE_RW),
5044 TYPE_MODE (integer_type_node));
5046 conversion_insns = get_insns ();
5047 end_sequence ();
5049 DECL_RTL (parm) = stack_parm;
5052 /* If this "parameter" was the place where we are receiving the
5053 function's incoming structure pointer, set up the result. */
5054 if (parm == function_result_decl)
5056 tree result = DECL_RESULT (fndecl);
5057 tree restype = TREE_TYPE (result);
5059 DECL_RTL (result)
5060 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
5062 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
5063 AGGREGATE_TYPE_P (restype));
5066 if (TREE_THIS_VOLATILE (parm))
5067 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
5068 if (TREE_READONLY (parm))
5069 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
5072 /* Output all parameter conversion instructions (possibly including calls)
5073 now that all parameters have been copied out of hard registers. */
5074 emit_insns (conversion_insns);
5076 last_parm_insn = get_last_insn ();
5078 current_function_args_size = stack_args_size.constant;
5080 /* Adjust function incoming argument size for alignment and
5081 minimum length. */
5083 #ifdef REG_PARM_STACK_SPACE
5084 #ifndef MAYBE_REG_PARM_STACK_SPACE
5085 current_function_args_size = MAX (current_function_args_size,
5086 REG_PARM_STACK_SPACE (fndecl));
5087 #endif
5088 #endif
5090 #ifdef STACK_BOUNDARY
5091 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5093 current_function_args_size
5094 = ((current_function_args_size + STACK_BYTES - 1)
5095 / STACK_BYTES) * STACK_BYTES;
5096 #endif
5098 #ifdef ARGS_GROW_DOWNWARD
5099 current_function_arg_offset_rtx
5100 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5101 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
5102 size_int (-stack_args_size.constant)),
5103 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5104 #else
5105 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5106 #endif
5108 /* See how many bytes, if any, of its args a function should try to pop
5109 on return. */
5111 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5112 current_function_args_size);
5114 /* For a stdarg.h function, save info about
5115 regs and stack space used by the named args. */
5117 if (!hide_last_arg)
5118 current_function_args_info = args_so_far;
5120 /* Set the rtx used for the function return value. Put this in its
5121 own variable so any optimizers that need this information don't have
5122 to include tree.h. Do this here so it gets done when an inlined
5123 function gets output. */
5125 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
5128 /* Indicate whether REGNO is an incoming argument to the current function
5129 that was promoted to a wider mode. If so, return the RTX for the
5130 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5131 that REGNO is promoted from and whether the promotion was signed or
5132 unsigned. */
5134 #ifdef PROMOTE_FUNCTION_ARGS
5137 promoted_input_arg (regno, pmode, punsignedp)
5138 int regno;
5139 enum machine_mode *pmode;
5140 int *punsignedp;
5142 tree arg;
5144 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5145 arg = TREE_CHAIN (arg))
5146 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5147 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5148 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5150 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5151 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5153 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5154 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5155 && mode != DECL_MODE (arg))
5157 *pmode = DECL_MODE (arg);
5158 *punsignedp = unsignedp;
5159 return DECL_INCOMING_RTL (arg);
5163 return 0;
5166 #endif
5168 /* Compute the size and offset from the start of the stacked arguments for a
5169 parm passed in mode PASSED_MODE and with type TYPE.
5171 INITIAL_OFFSET_PTR points to the current offset into the stacked
5172 arguments.
5174 The starting offset and size for this parm are returned in *OFFSET_PTR
5175 and *ARG_SIZE_PTR, respectively.
5177 IN_REGS is non-zero if the argument will be passed in registers. It will
5178 never be set if REG_PARM_STACK_SPACE is not defined.
5180 FNDECL is the function in which the argument was defined.
5182 There are two types of rounding that are done. The first, controlled by
5183 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5184 list to be aligned to the specific boundary (in bits). This rounding
5185 affects the initial and starting offsets, but not the argument size.
5187 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5188 optionally rounds the size of the parm to PARM_BOUNDARY. The
5189 initial offset is not affected by this rounding, while the size always
5190 is and the starting offset may be. */
5192 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
5193 initial_offset_ptr is positive because locate_and_pad_parm's
5194 callers pass in the total size of args so far as
5195 initial_offset_ptr. arg_size_ptr is always positive. */
5197 void
5198 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5199 initial_offset_ptr, offset_ptr, arg_size_ptr)
5200 enum machine_mode passed_mode;
5201 tree type;
5202 int in_regs;
5203 tree fndecl ATTRIBUTE_UNUSED;
5204 struct args_size *initial_offset_ptr;
5205 struct args_size *offset_ptr;
5206 struct args_size *arg_size_ptr;
5208 tree sizetree
5209 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5210 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5211 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5213 #ifdef REG_PARM_STACK_SPACE
5214 /* If we have found a stack parm before we reach the end of the
5215 area reserved for registers, skip that area. */
5216 if (! in_regs)
5218 int reg_parm_stack_space = 0;
5220 #ifdef MAYBE_REG_PARM_STACK_SPACE
5221 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5222 #else
5223 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5224 #endif
5225 if (reg_parm_stack_space > 0)
5227 if (initial_offset_ptr->var)
5229 initial_offset_ptr->var
5230 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5231 size_int (reg_parm_stack_space));
5232 initial_offset_ptr->constant = 0;
5234 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5235 initial_offset_ptr->constant = reg_parm_stack_space;
5238 #endif /* REG_PARM_STACK_SPACE */
5240 arg_size_ptr->var = 0;
5241 arg_size_ptr->constant = 0;
5243 #ifdef ARGS_GROW_DOWNWARD
5244 if (initial_offset_ptr->var)
5246 offset_ptr->constant = 0;
5247 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
5248 initial_offset_ptr->var);
5250 else
5252 offset_ptr->constant = - initial_offset_ptr->constant;
5253 offset_ptr->var = 0;
5255 if (where_pad != none
5256 && (TREE_CODE (sizetree) != INTEGER_CST
5257 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5258 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5259 SUB_PARM_SIZE (*offset_ptr, sizetree);
5260 if (where_pad != downward)
5261 pad_to_arg_alignment (offset_ptr, boundary);
5262 if (initial_offset_ptr->var)
5264 arg_size_ptr->var = size_binop (MINUS_EXPR,
5265 size_binop (MINUS_EXPR,
5266 integer_zero_node,
5267 initial_offset_ptr->var),
5268 offset_ptr->var);
5270 else
5272 arg_size_ptr->constant = (- initial_offset_ptr->constant
5273 - offset_ptr->constant);
5275 #else /* !ARGS_GROW_DOWNWARD */
5276 pad_to_arg_alignment (initial_offset_ptr, boundary);
5277 *offset_ptr = *initial_offset_ptr;
5279 #ifdef PUSH_ROUNDING
5280 if (passed_mode != BLKmode)
5281 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5282 #endif
5284 /* Pad_below needs the pre-rounded size to know how much to pad below
5285 so this must be done before rounding up. */
5286 if (where_pad == downward
5287 /* However, BLKmode args passed in regs have their padding done elsewhere.
5288 The stack slot must be able to hold the entire register. */
5289 && !(in_regs && passed_mode == BLKmode))
5290 pad_below (offset_ptr, passed_mode, sizetree);
5292 if (where_pad != none
5293 && (TREE_CODE (sizetree) != INTEGER_CST
5294 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5295 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5297 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5298 #endif /* ARGS_GROW_DOWNWARD */
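/* Worked example of the upward-growing path above, with assumed numbers
   rather than real target macros: argument boundary 8 bytes, a
   PARM_BOUNDARY equivalent of 4 bytes, initial offset 14, argument size
   10.  The start offset pads from 14 up to 16, the size rounds from 10
   up to 12, so the next argument would start at 28.  This sketch covers
   only constant sizes; the real routine also builds trees for
   variable-sized arguments.  */
#if 0
static void
toy_locate_and_pad (initial, boundary_bytes, size, parm_bytes,
		    offset_out, size_out)
     int initial, boundary_bytes, size, parm_bytes;
     int *offset_out, *size_out;
{
  *offset_out = ((initial + boundary_bytes - 1)
		 / boundary_bytes) * boundary_bytes;
  *size_out = ((size + parm_bytes - 1) / parm_bytes) * parm_bytes;
}
#endif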
5301 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5302 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5304 static void
5305 pad_to_arg_alignment (offset_ptr, boundary)
5306 struct args_size *offset_ptr;
5307 int boundary;
5309 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5311 if (boundary > BITS_PER_UNIT)
5313 if (offset_ptr->var)
5315 offset_ptr->var =
5316 #ifdef ARGS_GROW_DOWNWARD
5317 round_down
5318 #else
5319 round_up
5320 #endif
5321 (ARGS_SIZE_TREE (*offset_ptr),
5322 boundary / BITS_PER_UNIT);
5323 offset_ptr->constant = 0; /*?*/
5325 else
5326 offset_ptr->constant =
5327 #ifdef ARGS_GROW_DOWNWARD
5328 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5329 #else
5330 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5331 #endif
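/* Numeric illustration of the two roundings selected just above,
   assuming a boundary of 8 bytes (the masking form requires a power of
   two): rounding 13 down gives 8, as used when the argument area grows
   downward; rounding 13 up gives 16.  */
#if 0
static int
round_examples ()
{
  int down = 13 & ~(8 - 1);		/* == 8, cf. FLOOR_ROUND */
  int up = (13 + 8 - 1) & ~(8 - 1);	/* == 16, cf. CEIL_ROUND */

  return down + up;
}
#endif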
5335 #ifndef ARGS_GROW_DOWNWARD
5336 static void
5337 pad_below (offset_ptr, passed_mode, sizetree)
5338 struct args_size *offset_ptr;
5339 enum machine_mode passed_mode;
5340 tree sizetree;
5342 if (passed_mode != BLKmode)
5344 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5345 offset_ptr->constant
5346 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5347 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5348 - GET_MODE_SIZE (passed_mode));
5350 else
5352 if (TREE_CODE (sizetree) != INTEGER_CST
5353 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5355 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5356 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5357 /* Add it in. */
5358 ADD_PARM_SIZE (*offset_ptr, s2);
5359 SUB_PARM_SIZE (*offset_ptr, sizetree);
5363 #endif
5365 #ifdef ARGS_GROW_DOWNWARD
5366 static tree
5367 round_down (value, divisor)
5368 tree value;
5369 int divisor;
5371 return size_binop (MULT_EXPR,
5372 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5373 size_int (divisor));
5375 #endif
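/* For contrast with the mask-based macros, a sketch (illustrative
   only, under "#if 0") of what round_down computes: a FLOOR division
   followed by a multiply, which works for any positive divisor, not
   just powers of two.  */
#if 0
static long
round_down_int (long value, long divisor)
{
  long q = value / divisor;
  /* C division truncates toward zero; step down to get FLOOR
     semantics when the signs differ and there is a remainder.  */
  if (value % divisor != 0 && (value < 0) != (divisor < 0))
    q--;
  return q * divisor;
}
/* round_down_int (13, 8) == 8; round_down_int (-13, 8) == -16.  */
#endif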
5377 /* Walk the tree of blocks describing the binding levels within a function
5378 and warn about uninitialized variables.
5379 This is done after calling flow_analysis and before global_alloc
5380 clobbers the pseudo-regs to hard regs. */
5382 void
5383 uninitialized_vars_warning (block)
5384 tree block;
5386 register tree decl, sub;
5387 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5389 if (TREE_CODE (decl) == VAR_DECL
5390 /* These warnings are unreliable for aggregates
5391 because assigning the fields one by one can fail to convince
5392 flow.c that the entire aggregate was initialized.
5393 Unions are troublesome because members may be shorter. */
5394 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5395 && DECL_RTL (decl) != 0
5396 && GET_CODE (DECL_RTL (decl)) == REG
5397 /* Global optimizations can make it difficult to determine if a
5398 particular variable has been initialized. However, a VAR_DECL
5399 with a nonzero DECL_INITIAL had an initializer, so do not
5400 claim it is potentially uninitialized.
5402 We do not care about the actual value in DECL_INITIAL, so we do
5403 not worry that it may be a dangling pointer. */
5404 && DECL_INITIAL (decl) == NULL_TREE
5405 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5406 warning_with_decl (decl,
5407 "`%s' might be used uninitialized in this function");
5408 if (TREE_CODE (decl) == VAR_DECL
5409 && DECL_RTL (decl) != 0
5410 && GET_CODE (DECL_RTL (decl)) == REG
5411 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5412 warning_with_decl (decl,
5413 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5415 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5416 uninitialized_vars_warning (sub);
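/* An illustrative source fragment (hypothetical code, not compiled)
   showing why AGGREGATE_TYPE_P values are excluded above: flow
   analysis tracks the pseudo as a whole, so member-by-member
   initialization like this could otherwise draw a spurious
   "might be used uninitialized" warning.  */
#if 0
struct point { int x, y; };

int
use_point ()
{
  struct point p;         /* aggregate, so exempt from the warning */
  p.x = 1;
  p.y = 2;
  return p.x + p.y;       /* fully initialized, one field at a time */
}
#endif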
5419 /* Do the appropriate part of uninitialized_vars_warning
5420 but for arguments instead of local variables. */
5422 void
5423 setjmp_args_warning ()
5425 register tree decl;
5426 for (decl = DECL_ARGUMENTS (current_function_decl);
5427 decl; decl = TREE_CHAIN (decl))
5428 if (DECL_RTL (decl) != 0
5429 && GET_CODE (DECL_RTL (decl)) == REG
5430 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5431 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5434 /* If this function calls setjmp, put all vars into the stack
5435 unless they were declared `register'. */
5437 void
5438 setjmp_protect (block)
5439 tree block;
5441 register tree decl, sub;
5442 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5443 if ((TREE_CODE (decl) == VAR_DECL
5444 || TREE_CODE (decl) == PARM_DECL)
5445 && DECL_RTL (decl) != 0
5446 && (GET_CODE (DECL_RTL (decl)) == REG
5447 || (GET_CODE (DECL_RTL (decl)) == MEM
5448 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5449 /* If this variable came from an inline function, it must be
5450 that its life doesn't overlap the setjmp. If there was a
5451 setjmp in the function, it would already be in memory. We
5452 must exclude such variables because their DECL_RTL might be
5453 set to strange things such as virtual_stack_vars_rtx. */
5454 && ! DECL_FROM_INLINE (decl)
5455 && (
5456 #ifdef NON_SAVING_SETJMP
5457 /* If longjmp doesn't restore the registers,
5458 don't put anything in them. */
5459 NON_SAVING_SETJMP
5461 #endif
5462 ! DECL_REGISTER (decl)))
5463 put_var_into_stack (decl);
5464 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5465 setjmp_protect (sub);
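/* A hypothetical user-level fragment (illustrative only) of the
   hazard setjmp_protect guards against: a local that stays in a
   register across setjmp can be rolled back to its old value by
   longjmp, so such locals must live in memory.  */
#if 0
#include <setjmp.h>

extern jmp_buf env;
extern void may_longjmp ();   /* assumed to call longjmp (env, 1) */

int
f ()
{
  int n = 0;                  /* unprotected, n might live in a reg */
  if (setjmp (env) == 0)
    {
      n = 1;
      may_longjmp ();
    }
  return n;                   /* could see 0 again if n was in a reg */
}
#endif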
5468 /* Like the previous function, but for args instead of local variables. */
5470 void
5471 setjmp_protect_args ()
5473 register tree decl;
5474 for (decl = DECL_ARGUMENTS (current_function_decl);
5475 decl; decl = TREE_CHAIN (decl))
5476 if ((TREE_CODE (decl) == VAR_DECL
5477 || TREE_CODE (decl) == PARM_DECL)
5478 && DECL_RTL (decl) != 0
5479 && (GET_CODE (DECL_RTL (decl)) == REG
5480 || (GET_CODE (DECL_RTL (decl)) == MEM
5481 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5482 && (
5483 /* If longjmp doesn't restore the registers,
5484 don't put anything in them. */
5485 #ifdef NON_SAVING_SETJMP
5486 NON_SAVING_SETJMP
5488 #endif
5489 ! DECL_REGISTER (decl)))
5490 put_var_into_stack (decl);
5493 /* Return the context-pointer register corresponding to DECL,
5494 or 0 if it does not need one. */
5496 rtx
5497 lookup_static_chain (decl)
5498 tree decl;
5500 tree context = decl_function_context (decl);
5501 tree link;
5503 if (context == 0
5504 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5505 return 0;
5507 /* We treat inline_function_decl as an alias for the current function
5508 because that is the inline function whose vars, types, etc.
5509 are being merged into the current function.
5510 See expand_inline_function. */
5511 if (context == current_function_decl || context == inline_function_decl)
5512 return virtual_stack_vars_rtx;
5514 for (link = context_display; link; link = TREE_CHAIN (link))
5515 if (TREE_PURPOSE (link) == context)
5516 return RTL_EXPR_RTL (TREE_VALUE (link));
5518 abort ();
5521 /* Convert a stack slot address ADDR for variable VAR
5522 (from a containing function)
5523 into an address valid in this function (using a static chain). */
5525 rtx
5526 fix_lexical_addr (addr, var)
5527 rtx addr;
5528 tree var;
5530 rtx basereg;
5531 HOST_WIDE_INT displacement;
5532 tree context = decl_function_context (var);
5533 struct function *fp;
5534 rtx base = 0;
5536 /* If this is the present function, we need not do anything. */
5537 if (context == current_function_decl || context == inline_function_decl)
5538 return addr;
5540 for (fp = outer_function_chain; fp; fp = fp->next)
5541 if (fp->decl == context)
5542 break;
5544 if (fp == 0)
5545 abort ();
5547 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5548 addr = XEXP (XEXP (addr, 0), 0);
5550 /* Decode given address as base reg plus displacement. */
5551 if (GET_CODE (addr) == REG)
5552 basereg = addr, displacement = 0;
5553 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5554 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5555 else
5556 abort ();
5558 /* We accept vars reached via the containing function's
5559 incoming arg pointer and via its stack variables pointer. */
5560 if (basereg == fp->internal_arg_pointer)
5562 /* If reached via arg pointer, get the arg pointer value
5563 out of that function's stack frame.
5565 There are two cases: If a separate ap is needed, allocate a
5566 slot in the outer function for it and dereference it that way.
5567 This is correct even if the real ap is actually a pseudo.
5568 Otherwise, just adjust the offset from the frame pointer to
5569 compensate. */
5571 #ifdef NEED_SEPARATE_AP
5572 rtx addr;
5574 if (fp->arg_pointer_save_area == 0)
5575 fp->arg_pointer_save_area
5576 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5578 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5579 addr = memory_address (Pmode, addr);
5581 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5582 #else
5583 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5584 base = lookup_static_chain (var);
5585 #endif
5588 else if (basereg == virtual_stack_vars_rtx)
5590 /* This is the same code as lookup_static_chain, duplicated here to
5591 avoid an extra call to decl_function_context. */
5592 tree link;
5594 for (link = context_display; link; link = TREE_CHAIN (link))
5595 if (TREE_PURPOSE (link) == context)
5597 base = RTL_EXPR_RTL (TREE_VALUE (link));
5598 break;
5602 if (base == 0)
5603 abort ();
5605 /* Use same offset, relative to appropriate static chain or argument
5606 pointer. */
5607 return plus_constant (base, displacement);
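/* The address rewrite above in miniature (an illustrative sketch,
   not real compiler code): the slot keeps its displacement, and only
   the base is replaced by this function's copy of the containing
   frame's base pointer.  */
#if 0
#include <stddef.h>

static void *
rebase_slot (char *copy_of_outer_base, ptrdiff_t displacement)
{
  return copy_of_outer_base + displacement;
}
#endif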
5610 /* Return the address of the trampoline for entering nested fn FUNCTION.
5611 If necessary, allocate a trampoline (in the stack frame)
5612 and emit rtl to initialize its contents (at entry to this function). */
5614 rtx
5615 trampoline_address (function)
5616 tree function;
5618 tree link;
5619 tree rtlexp;
5620 rtx tramp;
5621 struct function *fp;
5622 tree fn_context;
5624 /* Find an existing trampoline and return it. */
5625 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5626 if (TREE_PURPOSE (link) == function)
5627 return
5628 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5630 for (fp = outer_function_chain; fp; fp = fp->next)
5631 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5632 if (TREE_PURPOSE (link) == function)
5634 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5635 function);
5636 return round_trampoline_addr (tramp);
5639 /* None exists; we must make one. */
5641 /* Find the `struct function' for the function containing FUNCTION. */
5642 fp = 0;
5643 fn_context = decl_function_context (function);
5644 if (fn_context != current_function_decl
5645 && fn_context != inline_function_decl)
5646 for (fp = outer_function_chain; fp; fp = fp->next)
5647 if (fp->decl == fn_context)
5648 break;
5650 /* Allocate run-time space for this trampoline
5651 (usually in the defining function's stack frame). */
5652 #ifdef ALLOCATE_TRAMPOLINE
5653 tramp = ALLOCATE_TRAMPOLINE (fp);
5654 #else
5655 /* If rounding needed, allocate extra space
5656 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5657 #ifdef TRAMPOLINE_ALIGNMENT
5658 #define TRAMPOLINE_REAL_SIZE \
5659 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5660 #else
5661 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5662 #endif
5663 if (fp != 0)
5664 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5665 else
5666 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5667 #endif
5669 /* Record the trampoline for reuse and note it for later initialization
5670 by expand_function_end. */
5671 if (fp != 0)
5673 push_obstacks (fp->function_maybepermanent_obstack,
5674 fp->function_maybepermanent_obstack);
5675 rtlexp = make_node (RTL_EXPR);
5676 RTL_EXPR_RTL (rtlexp) = tramp;
5677 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5678 pop_obstacks ();
5680 else
5682 /* Make the RTL_EXPR node temporary, not momentary, so that the
5683 trampoline_list doesn't become garbage. */
5684 int momentary = suspend_momentary ();
5685 rtlexp = make_node (RTL_EXPR);
5686 resume_momentary (momentary);
5688 RTL_EXPR_RTL (rtlexp) = tramp;
5689 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5692 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5693 return round_trampoline_addr (tramp);
5696 /* Given a trampoline address,
5697 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5699 static rtx
5700 round_trampoline_addr (tramp)
5701 rtx tramp;
5703 #ifdef TRAMPOLINE_ALIGNMENT
5704 /* Round address up to desired boundary. */
5705 rtx temp = gen_reg_rtx (Pmode);
5706 temp = expand_binop (Pmode, add_optab, tramp,
5707 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5708 temp, 0, OPTAB_LIB_WIDEN);
5709 tramp = expand_binop (Pmode, and_optab, temp,
5710 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5711 temp, 0, OPTAB_LIB_WIDEN);
5712 #endif
5713 return tramp;
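/* The add-then-and pair above computes, for a power-of-two byte
   alignment A, the value (tramp + A - 1) & -A.  A standalone sketch
   (illustrative only) on host integers:  */
#if 0
static unsigned long
round_addr_up (unsigned long tramp, unsigned long a)
{
  return (tramp + a - 1) & ~(a - 1);
}
/* round_addr_up (0x1003, 8) == 0x1008, and an already-aligned
   address is unchanged: round_addr_up (0x1008, 8) == 0x1008.  */
#endif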
5716 /* The functions identify_blocks and reorder_blocks provide a way to
5717 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5718 duplicate portions of the RTL code. Call identify_blocks before
5719 changing the RTL, and call reorder_blocks after. */
5721 /* Put all this function's BLOCK nodes, including those that are chained
5722 onto the first block, into a vector, and return it.
5723 Also store in each NOTE for the beginning or end of a block
5724 the index of that block in the vector.
5725 The arguments are BLOCK, the chain of top-level blocks of the function,
5726 and INSNS, the insn chain of the function. */
5728 tree *
5729 identify_blocks (block, insns)
5730 tree block;
5731 rtx insns;
5733 int n_blocks;
5734 tree *block_vector;
5735 int *block_stack;
5736 int depth = 0;
5737 int next_block_number = 1;
5738 int current_block_number = 1;
5739 rtx insn;
5741 if (block == 0)
5742 return 0;
5744 n_blocks = all_blocks (block, 0);
5745 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5746 block_stack = (int *) alloca (n_blocks * sizeof (int));
5748 all_blocks (block, block_vector);
5750 for (insn = insns; insn; insn = NEXT_INSN (insn))
5751 if (GET_CODE (insn) == NOTE)
5753 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5755 block_stack[depth++] = current_block_number;
5756 current_block_number = next_block_number;
5757 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5759 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5761 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5762 current_block_number = block_stack[--depth];
5766 if (n_blocks != next_block_number)
5767 abort ();
5769 return block_vector;
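/* The numbering discipline above, reduced to a sketch (illustrative
   only): treat '(' as NOTE_INSN_BLOCK_BEG and ')' as
   NOTE_INSN_BLOCK_END, push the current block number on BEG, pop on
   END.  For "(()())" the BEG markers are numbered 1, 2, 3 in the
   order encountered, just as the NOTE_BLOCK_NUMBERs are.  */
#if 0
#include <stdio.h>

static void
number_blocks (const char *stream)
{
  int stack[64];
  int depth = 0, next = 1, current = 1;

  for (; *stream; stream++)
    if (*stream == '(')
      {
        stack[depth++] = current;
        current = next;
        printf ("BEG -> block %d\n", next++);
      }
    else if (*stream == ')')
      {
        printf ("END -> block %d\n", current);
        current = stack[--depth];
      }
}
#endif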
5772 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5773 and a revised instruction chain, rebuild the tree structure
5774 of BLOCK nodes to correspond to the new order of RTL.
5775 The new block tree is inserted below TOP_BLOCK.
5776 Returns the current top-level block. */
5778 tree
5779 reorder_blocks (block_vector, block, insns)
5780 tree *block_vector;
5781 tree block;
5782 rtx insns;
5784 tree current_block = block;
5785 rtx insn;
5787 if (block_vector == 0)
5788 return block;
5790 /* Prune the old trees away, so that they don't get in the way. */
5791 BLOCK_SUBBLOCKS (current_block) = 0;
5792 BLOCK_CHAIN (current_block) = 0;
5794 for (insn = insns; insn; insn = NEXT_INSN (insn))
5795 if (GET_CODE (insn) == NOTE)
5797 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5799 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5800 /* If we have seen this block before, copy it. */
5801 if (TREE_ASM_WRITTEN (block))
5802 block = copy_node (block);
5803 BLOCK_SUBBLOCKS (block) = 0;
5804 TREE_ASM_WRITTEN (block) = 1;
5805 BLOCK_SUPERCONTEXT (block) = current_block;
5806 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5807 BLOCK_SUBBLOCKS (current_block) = block;
5808 current_block = block;
5809 NOTE_SOURCE_FILE (insn) = 0;
5811 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5813 BLOCK_SUBBLOCKS (current_block)
5814 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5815 current_block = BLOCK_SUPERCONTEXT (current_block);
5816 NOTE_SOURCE_FILE (insn) = 0;
5820 BLOCK_SUBBLOCKS (current_block)
5821 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5822 return current_block;
5825 /* Reverse the order of elements in the chain T of blocks,
5826 and return the new head of the chain (old last element). */
5828 static tree
5829 blocks_nreverse (t)
5830 tree t;
5832 register tree prev = 0, decl, next;
5833 for (decl = t; decl; decl = next)
5835 next = BLOCK_CHAIN (decl);
5836 BLOCK_CHAIN (decl) = prev;
5837 prev = decl;
5839 return prev;
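/* blocks_nreverse is the textbook in-place reversal of a singly
   linked chain; the same loop on a stand-in node type (illustrative
   sketch only):  */
#if 0
struct node { struct node *chain; };

static struct node *
nreverse (struct node *t)
{
  struct node *prev = 0, *decl, *next;
  for (decl = t; decl; decl = next)
    {
      next = decl->chain;
      decl->chain = prev;       /* point each link at its predecessor */
      prev = decl;
    }
  return prev;                  /* old tail is the new head */
}
#endif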
5842 /* Count the subblocks of the list starting with BLOCK, and list them
5843 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5844 blocks. */
5846 static int
5847 all_blocks (block, vector)
5848 tree block;
5849 tree *vector;
5851 int n_blocks = 0;
5853 while (block)
5855 TREE_ASM_WRITTEN (block) = 0;
5857 /* Record this block. */
5858 if (vector)
5859 vector[n_blocks] = block;
5861 ++n_blocks;
5863 /* Record the subblocks, and their subblocks... */
5864 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5865 vector ? vector + n_blocks : 0);
5866 block = BLOCK_CHAIN (block);
5869 return n_blocks;
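/* The two-pass pattern identify_blocks builds on: call all_blocks
   once with a null vector to learn the count, allocate exactly that
   many slots, then call it again to fill them.  A sketch on a
   stand-in type (illustrative only):  */
#if 0
#include <stdlib.h>

struct blk { struct blk *sub, *chain; };

static int
walk (struct blk *b, struct blk **vec)
{
  int n = 0;
  while (b)
    {
      if (vec)
        vec[n] = b;
      ++n;
      n += walk (b->sub, vec ? vec + n : 0);
      b = b->chain;
    }
  return n;
}

static struct blk **
collect (struct blk *top)
{
  int n = walk (top, 0);                /* first pass: count */
  struct blk **vec = (struct blk **) malloc (n * sizeof (struct blk *));
  if (vec)
    walk (top, vec);                    /* second pass: fill */
  return vec;
}
#endif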
5872 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5873 and initialize static variables for generating RTL for the statements
5874 of the function. */
5876 void
5877 init_function_start (subr, filename, line)
5878 tree subr;
5879 char *filename;
5880 int line;
5882 init_stmt_for_function ();
5884 cse_not_expected = ! optimize;
5886 /* Caller save not needed yet. */
5887 caller_save_needed = 0;
5889 /* No stack slots have been made yet. */
5890 stack_slot_list = 0;
5892 /* There is no stack slot for handling nonlocal gotos. */
5893 nonlocal_goto_handler_slots = 0;
5894 nonlocal_goto_stack_level = 0;
5896 /* No labels have been declared for nonlocal use. */
5897 nonlocal_labels = 0;
5898 nonlocal_goto_handler_labels = 0;
5900 /* No function calls so far in this function. */
5901 function_call_count = 0;
5903 /* No parm regs have been allocated.
5904 (This is important for output_inline_function.) */
5905 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5907 /* Initialize the RTL mechanism. */
5908 init_emit ();
5910 /* Initialize the queue of pending postincrement and postdecrements,
5911 and some other info in expr.c. */
5912 init_expr ();
5914 /* We haven't done register allocation yet. */
5915 reg_renumber = 0;
5917 init_const_rtx_hash_table ();
5919 current_function_name = (*decl_printable_name) (subr, 2);
5921 /* Nonzero if this is a nested function that uses a static chain. */
5923 current_function_needs_context
5924 = (decl_function_context (current_function_decl) != 0
5925 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5927 /* Set if a call to setjmp is seen. */
5928 current_function_calls_setjmp = 0;
5930 /* Set if a call to longjmp is seen. */
5931 current_function_calls_longjmp = 0;
5933 current_function_calls_alloca = 0;
5934 current_function_has_nonlocal_label = 0;
5935 current_function_has_nonlocal_goto = 0;
5936 current_function_contains_functions = 0;
5937 current_function_is_leaf = 0;
5938 current_function_sp_is_unchanging = 0;
5939 current_function_uses_only_leaf_regs = 0;
5940 current_function_has_computed_jump = 0;
5941 current_function_is_thunk = 0;
5943 current_function_returns_pcc_struct = 0;
5944 current_function_returns_struct = 0;
5945 current_function_epilogue_delay_list = 0;
5946 current_function_uses_const_pool = 0;
5947 current_function_uses_pic_offset_table = 0;
5948 current_function_cannot_inline = 0;
5950 /* We have not yet needed to make a label to jump to for tail-recursion. */
5951 tail_recursion_label = 0;
5953 /* We haven't had a need to make a save area for ap yet. */
5955 arg_pointer_save_area = 0;
5957 /* No stack slots allocated yet. */
5958 frame_offset = 0;
5960 /* No SAVE_EXPRs in this function yet. */
5961 save_expr_regs = 0;
5963 /* No RTL_EXPRs in this function yet. */
5964 rtl_expr_chain = 0;
5966 /* Set up to allocate temporaries. */
5967 init_temp_slots ();
5969 /* Within function body, compute a type's size as soon as it is laid out. */
5970 immediate_size_expand++;
5972 /* We haven't made any trampolines for this function yet. */
5973 trampoline_list = 0;
5975 init_pending_stack_adjust ();
5976 inhibit_defer_pop = 0;
5978 current_function_outgoing_args_size = 0;
5980 /* Prevent ever trying to delete the first instruction of a function.
5981 Also tell final how to output a linenum before the function prologue.
5982 Note linenums could be missing, e.g. when compiling a Java .class file. */
5983 if (line > 0)
5984 emit_line_note (filename, line);
5986 /* Make sure first insn is a note even if we don't want linenums.
5987 This makes sure the first insn will never be deleted.
5988 Also, final expects a note to appear there. */
5989 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5991 /* Set flags used by final.c. */
5992 if (aggregate_value_p (DECL_RESULT (subr)))
5994 #ifdef PCC_STATIC_STRUCT_RETURN
5995 current_function_returns_pcc_struct = 1;
5996 #endif
5997 current_function_returns_struct = 1;
6000 /* Warn if this value is an aggregate type,
6001 regardless of which calling convention we are using for it. */
6002 if (warn_aggregate_return
6003 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6004 warning ("function returns an aggregate");
6006 current_function_returns_pointer
6007 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6009 /* Indicate that we need to distinguish between the return value of the
6010 present function and the return value of a function being called. */
6011 rtx_equal_function_value_matters = 1;
6013 /* Indicate that we have not instantiated virtual registers yet. */
6014 virtuals_instantiated = 0;
6016 /* Indicate we have no need of a frame pointer yet. */
6017 frame_pointer_needed = 0;
6019 /* By default assume not varargs or stdarg. */
6020 current_function_varargs = 0;
6021 current_function_stdarg = 0;
6024 /* Indicate that the current function uses extra args
6025 not explicitly mentioned in the argument list in any fashion. */
6027 void
6028 mark_varargs ()
6030 current_function_varargs = 1;
6033 /* Expand a call to __main at the beginning of a possible main function. */
6035 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6036 #undef HAS_INIT_SECTION
6037 #define HAS_INIT_SECTION
6038 #endif
6040 void
6041 expand_main_function ()
6043 #if !defined (HAS_INIT_SECTION)
6044 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6045 VOIDmode, 0);
6046 #endif /* not HAS_INIT_SECTION */
6049 extern struct obstack permanent_obstack;
6051 /* Start the RTL for a new function, and set variables used for
6052 emitting RTL.
6053 SUBR is the FUNCTION_DECL node.
6054 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6055 the function's parameters, which must be run at any return statement. */
6057 void
6058 expand_function_start (subr, parms_have_cleanups)
6059 tree subr;
6060 int parms_have_cleanups;
6062 register int i;
6063 tree tem;
6064 rtx last_ptr = NULL_RTX;
6066 /* Make sure volatile mem refs aren't considered
6067 valid operands of arithmetic insns. */
6068 init_recog_no_volatile ();
6070 /* Set this before generating any memory accesses. */
6071 current_function_check_memory_usage
6072 = (flag_check_memory_usage
6073 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6075 current_function_instrument_entry_exit
6076 = (flag_instrument_function_entry_exit
6077 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6079 /* If function gets a static chain arg, store it in the stack frame.
6080 Do this first, so it gets the first stack slot offset. */
6081 if (current_function_needs_context)
6083 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6085 /* Delay copying static chain if it is not a register to avoid
6086 conflicts with regs used for parameters. */
6087 if (! SMALL_REGISTER_CLASSES
6088 || GET_CODE (static_chain_incoming_rtx) == REG)
6089 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6092 /* If the parameters of this function need cleaning up, get a label
6093 for the beginning of the code which executes those cleanups. This must
6094 be done before doing anything with return_label. */
6095 if (parms_have_cleanups)
6096 cleanup_label = gen_label_rtx ();
6097 else
6098 cleanup_label = 0;
6100 /* Make the label for return statements to jump to, if this machine
6101 does not have a one-instruction return and uses an epilogue,
6102 or if it returns a structure, or if it has parm cleanups. */
6103 #ifdef HAVE_return
6104 if (cleanup_label == 0 && HAVE_return
6105 && ! current_function_instrument_entry_exit
6106 && ! current_function_returns_pcc_struct
6107 && ! (current_function_returns_struct && ! optimize))
6108 return_label = 0;
6109 else
6110 return_label = gen_label_rtx ();
6111 #else
6112 return_label = gen_label_rtx ();
6113 #endif
6115 /* Initialize rtx used to return the value. */
6116 /* Do this before assign_parms so that we copy the struct value address
6117 before any library calls that assign parms might generate. */
6119 /* Decide whether to return the value in memory or in a register. */
6120 if (aggregate_value_p (DECL_RESULT (subr)))
6122 /* Returning something that won't go in a register. */
6123 register rtx value_address = 0;
6125 #ifdef PCC_STATIC_STRUCT_RETURN
6126 if (current_function_returns_pcc_struct)
6128 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6129 value_address = assemble_static_space (size);
6131 else
6132 #endif
6134 /* Expect to be passed the address of a place to store the value.
6135 If it is passed as an argument, assign_parms will take care of
6136 it. */
6137 if (struct_value_incoming_rtx)
6139 value_address = gen_reg_rtx (Pmode);
6140 emit_move_insn (value_address, struct_value_incoming_rtx);
6143 if (value_address)
6145 DECL_RTL (DECL_RESULT (subr))
6146 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6147 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
6148 AGGREGATE_TYPE_P (TREE_TYPE
6149 (DECL_RESULT
6150 (subr))));
6153 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6154 /* If return mode is void, this decl rtl should not be used. */
6155 DECL_RTL (DECL_RESULT (subr)) = 0;
6156 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6158 /* If function will end with cleanup code for parms,
6159 compute the return value into a pseudo reg,
6160 which we will copy into the true return register
6161 after the cleanups are done. */
6163 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6165 #ifdef PROMOTE_FUNCTION_RETURN
6166 tree type = TREE_TYPE (DECL_RESULT (subr));
6167 int unsignedp = TREE_UNSIGNED (type);
6169 mode = promote_mode (type, mode, &unsignedp, 1);
6170 #endif
6172 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6174 else
6175 /* Scalar, returned in a register. */
6177 #ifdef FUNCTION_OUTGOING_VALUE
6178 DECL_RTL (DECL_RESULT (subr))
6179 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6180 #else
6181 DECL_RTL (DECL_RESULT (subr))
6182 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6183 #endif
6185 /* Mark this reg as the function's return value. */
6186 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6188 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6189 /* Needed because we may need to move this to memory
6190 in case it's a named return value whose address is taken. */
6191 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6195 /* Initialize rtx for parameters and local variables.
6196 In some cases this requires emitting insns. */
6198 assign_parms (subr, 0);
6200 /* Copy the static chain now if it wasn't a register. The delay is to
6201 avoid conflicts with the parameter passing registers. */
6203 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6204 if (GET_CODE (static_chain_incoming_rtx) != REG)
6205 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6207 /* The following was moved from init_function_start.
6208 The move is supposed to make sdb output more accurate. */
6209 /* Indicate the beginning of the function body,
6210 as opposed to parm setup. */
6211 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6213 /* If doing stupid allocation, mark parms as born here. */
6215 if (GET_CODE (get_last_insn ()) != NOTE)
6216 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6217 parm_birth_insn = get_last_insn ();
6219 if (obey_regdecls)
6221 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6222 use_variable (regno_reg_rtx[i]);
6224 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6225 use_variable (current_function_internal_arg_pointer);
6228 context_display = 0;
6229 if (current_function_needs_context)
6231 /* Fetch static chain values for containing functions. */
6232 tem = decl_function_context (current_function_decl);
6233 /* If not doing stupid register allocation copy the static chain
6234 pointer into a pseudo. If we have small register classes, copy
6235 the value from memory if static_chain_incoming_rtx is a REG. If
6236 we do stupid register allocation, we use the stack address
6237 generated above. */
6238 if (tem && ! obey_regdecls)
6240 /* If the static chain originally came in a register, put it back
6241 there, then move it out in the next insn. The reason for
6242 this peculiar code is to satisfy function integration. */
6243 if (SMALL_REGISTER_CLASSES
6244 && GET_CODE (static_chain_incoming_rtx) == REG)
6245 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6246 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6249 while (tem)
6251 tree rtlexp = make_node (RTL_EXPR);
6253 RTL_EXPR_RTL (rtlexp) = last_ptr;
6254 context_display = tree_cons (tem, rtlexp, context_display);
6255 tem = decl_function_context (tem);
6256 if (tem == 0)
6257 break;
6258 /* Chain thru stack frames, assuming pointer to next lexical frame
6259 is found at the place we always store it. */
6260 #ifdef FRAME_GROWS_DOWNWARD
6261 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6262 #endif
6263 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6264 memory_address (Pmode, last_ptr)));
6266 /* If we are not optimizing, ensure that we know that this
6267 piece of context is live over the entire function. */
6268 if (! optimize)
6269 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6270 save_expr_regs);
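/* The display walk above in miniature (illustrative sketch only):
   every frame stores the pointer to its next enclosing lexical frame
   at one fixed slot, so outer frames are reached by repeated
   dereference of that slot.  */
#if 0
struct sframe { struct sframe *up; /* ... locals ... */ };

static struct sframe *
nth_enclosing_frame (struct sframe *fp, int n)
{
  while (n-- > 0)
    fp = fp->up;        /* the place we always store it */
  return fp;
}
#endif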
6274 if (current_function_instrument_entry_exit)
6276 rtx fun = DECL_RTL (current_function_decl);
6277 if (GET_CODE (fun) == MEM)
6278 fun = XEXP (fun, 0);
6279 else
6280 abort ();
6281 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6282 fun, Pmode,
6283 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6285 hard_frame_pointer_rtx),
6286 Pmode);
6289 /* After the display initializations is where the tail-recursion label
6290 should go, if we end up needing one. Ensure we have a NOTE here
6291 since some things (like trampolines) get placed before this. */
6292 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6294 /* Evaluate now the sizes of any types declared among the arguments. */
6295 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6297 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6298 EXPAND_MEMORY_USE_BAD);
6299 /* Flush the queue in case this parameter declaration has
6300 side-effects. */
6301 emit_queue ();
6304 /* Make sure there is a line number after the function entry setup code. */
6305 force_next_line_note ();
6308 /* Generate RTL for the end of the current function.
6309 FILENAME and LINE are the current position in the source file.
6311 It is up to language-specific callers to do cleanups for parameters--
6312 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6314 void
6315 expand_function_end (filename, line, end_bindings)
6316 char *filename;
6317 int line;
6318 int end_bindings;
6320 register int i;
6321 tree link;
6323 #ifdef TRAMPOLINE_TEMPLATE
6324 static rtx initial_trampoline;
6325 #endif
6327 #ifdef NON_SAVING_SETJMP
6328 /* Don't put any variables in registers if we call setjmp
6329 on a machine that fails to restore the registers. */
6330 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6332 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6333 setjmp_protect (DECL_INITIAL (current_function_decl));
6335 setjmp_protect_args ();
6337 #endif
6339 /* Save the argument pointer if a save area was made for it. */
6340 if (arg_pointer_save_area)
6342 /* arg_pointer_save_area may not be a valid memory address, so we
6343 have to check it and fix it if necessary. */
6344 rtx seq;
6345 start_sequence ();
6346 emit_move_insn (validize_mem (arg_pointer_save_area),
6347 virtual_incoming_args_rtx);
6348 seq = gen_sequence ();
6349 end_sequence ();
6350 emit_insn_before (seq, tail_recursion_reentry);
6353 /* Initialize any trampolines required by this function. */
6354 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6356 tree function = TREE_PURPOSE (link);
6357 rtx context = lookup_static_chain (function);
6358 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6359 #ifdef TRAMPOLINE_TEMPLATE
6360 rtx blktramp;
6361 #endif
6362 rtx seq;
6364 #ifdef TRAMPOLINE_TEMPLATE
6365 /* First make sure this compilation has a template for
6366 initializing trampolines. */
6367 if (initial_trampoline == 0)
6369 end_temporary_allocation ();
6370 initial_trampoline
6371 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6372 resume_temporary_allocation ();
6374 #endif
6376 /* Generate insns to initialize the trampoline. */
6377 start_sequence ();
6378 tramp = round_trampoline_addr (XEXP (tramp, 0));
6379 #ifdef TRAMPOLINE_TEMPLATE
6380 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6381 emit_block_move (blktramp, initial_trampoline,
6382 GEN_INT (TRAMPOLINE_SIZE),
6383 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6384 #endif
6385 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6386 seq = get_insns ();
6387 end_sequence ();
6389 /* Put those insns at entry to the containing function (this one). */
6390 emit_insns_before (seq, tail_recursion_reentry);
6393 /* If we are doing stack checking and this function makes calls,
6394 do a stack probe at the start of the function to ensure we have enough
6395 space for another stack frame. */
6396 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6398 rtx insn, seq;
6400 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6401 if (GET_CODE (insn) == CALL_INSN)
6403 start_sequence ();
6404 probe_stack_range (STACK_CHECK_PROTECT,
6405 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6406 seq = get_insns ();
6407 end_sequence ();
6408 emit_insns_before (seq, tail_recursion_reentry);
6409 break;
6413 /* Warn about unused parms if extra warnings were specified. */
6414 if (warn_unused && extra_warnings)
6416 tree decl;
6418 for (decl = DECL_ARGUMENTS (current_function_decl);
6419 decl; decl = TREE_CHAIN (decl))
6420 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6421 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6422 warning_with_decl (decl, "unused parameter `%s'");
6425 /* Delete handlers for nonlocal gotos if nothing uses them. */
6426 if (nonlocal_goto_handler_slots != 0
6427 && ! current_function_has_nonlocal_label)
6428 delete_handlers ();
6430 /* End any sequences that failed to be closed due to syntax errors. */
6431 while (in_sequence_p ())
6432 end_sequence ();
6434 /* Outside function body, can't compute type's actual size
6435 until next function's body starts. */
6436 immediate_size_expand--;
6438 /* If doing stupid register allocation,
6439 mark register parms as dying here. */
6441 if (obey_regdecls)
6443 rtx tem;
6444 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6445 use_variable (regno_reg_rtx[i]);
6447 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6449 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6451 use_variable (XEXP (tem, 0));
6452 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6455 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6456 use_variable (current_function_internal_arg_pointer);
6459 clear_pending_stack_adjust ();
6460 do_pending_stack_adjust ();
6462 /* Mark the end of the function body.
6463 If control reaches this insn, the function can drop through
6464 without returning a value. */
6465 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6467 /* Must mark the last line number note in the function, so that the test
6468 coverage code can avoid counting the last line twice. This just tells
6469 the code to ignore the immediately following line note, since there
6470 already exists a copy of this note somewhere above. This line number
6471 note is still needed for debugging though, so we can't delete it. */
6472 if (flag_test_coverage)
6473 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6475 /* Output a linenumber for the end of the function.
6476 SDB depends on this. */
6477 emit_line_note_force (filename, line);
6479 /* Output the label for the actual return from the function,
6480 if one is expected. This happens either because a function epilogue
6481 is used instead of a return instruction, or because a return was done
6482 with a goto in order to run local cleanups, or because of pcc-style
6483 structure returning. */
6485 if (return_label)
6486 emit_label (return_label);
6488 /* C++ uses this. */
6489 if (end_bindings)
6490 expand_end_bindings (0, 0, 0);
6492 /* Now handle any leftover exception regions that may have been
6493 created for the parameters. */
6495 rtx last = get_last_insn ();
6496 rtx label;
6498 expand_leftover_cleanups ();
6500 /* If the above emitted any code, make sure we jump around it. */
6501 if (last != get_last_insn ())
6503 label = gen_label_rtx ();
6504 last = emit_jump_insn_after (gen_jump (label), last);
6505 last = emit_barrier_after (last);
6506 emit_label (label);
6510 if (current_function_instrument_entry_exit)
6512 rtx fun = DECL_RTL (current_function_decl);
6513 if (GET_CODE (fun) == MEM)
6514 fun = XEXP (fun, 0);
6515 else
6516 abort ();
6517 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6518 fun, Pmode,
6519 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6521 hard_frame_pointer_rtx),
6522 Pmode);
6525 /* If we had calls to alloca, and this machine needs
6526 an accurate stack pointer to exit the function,
6527 insert some code to save and restore the stack pointer. */
6528 #ifdef EXIT_IGNORE_STACK
6529 if (! EXIT_IGNORE_STACK)
6530 #endif
6531 if (current_function_calls_alloca)
6533 rtx tem = 0;
6535 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6536 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6539 /* If scalar return value was computed in a pseudo-reg,
6540 copy that to the hard return register. */
6541 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6542 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6543 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6544 >= FIRST_PSEUDO_REGISTER))
6546 rtx real_decl_result;
6548 #ifdef FUNCTION_OUTGOING_VALUE
6549 real_decl_result
6550 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6551 current_function_decl);
6552 #else
6553 real_decl_result
6554 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6555 current_function_decl);
6556 #endif
6557 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6558 /* If this is a BLKmode structure being returned in registers, then use
6559 the mode computed in expand_return. */
6560 if (GET_MODE (real_decl_result) == BLKmode)
6561 PUT_MODE (real_decl_result,
6562 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6563 emit_move_insn (real_decl_result,
6564 DECL_RTL (DECL_RESULT (current_function_decl)));
6565 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6567 /* The delay slot scheduler assumes that current_function_return_rtx
6568 holds the hard register containing the return value, not a temporary
6569 pseudo. */
6570 current_function_return_rtx = real_decl_result;
6573 /* If returning a structure, arrange to return the address of the value
6574 in a place where debuggers expect to find it.
6576 If returning a structure PCC style,
6577 the caller also depends on this value.
6578 And current_function_returns_pcc_struct is not necessarily set. */
6579 if (current_function_returns_struct
6580 || current_function_returns_pcc_struct)
6582 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6583 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6584 #ifdef FUNCTION_OUTGOING_VALUE
6585 rtx outgoing
6586 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6587 current_function_decl);
6588 #else
6589 rtx outgoing
6590 = FUNCTION_VALUE (build_pointer_type (type),
6591 current_function_decl);
6592 #endif
6594 /* Mark this as a function return value so integrate will delete the
6595 assignment and USE below when inlining this function. */
6596 REG_FUNCTION_VALUE_P (outgoing) = 1;
6598 emit_move_insn (outgoing, value_address);
6599 use_variable (outgoing);
6602 /* If this is an implementation of __throw, do what's necessary to
6603 communicate between __builtin_eh_return and the epilogue. */
6604 expand_eh_return ();
6606 /* Output a return insn if we are using one.
6607 Otherwise, let the rtl chain end here, to drop through
6608 into the epilogue. */
6610 #ifdef HAVE_return
6611 if (HAVE_return)
6613 emit_jump_insn (gen_return ());
6614 emit_barrier ();
6616 #endif
6618 /* Fix up any gotos that jumped out to the outermost
6619 binding level of the function.
6620 Must follow emitting RETURN_LABEL. */
6622 /* If you have any cleanups to do at this point,
6623 and they need to create temporary variables,
6624 then you will lose. */
6625 expand_fixups (get_insns ());
6628 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6630 static int *prologue;
6631 static int *epilogue;
6633 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6634 or a single insn). */
6636 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6637 static int *
6638 record_insns (insns)
6639 rtx insns;
6641 int *vec;
6643 if (GET_CODE (insns) == SEQUENCE)
6645 int len = XVECLEN (insns, 0);
6646 vec = (int *) oballoc ((len + 1) * sizeof (int));
6647 vec[len] = 0;
6648 while (--len >= 0)
6649 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6651 else
6653 vec = (int *) oballoc (2 * sizeof (int));
6654 vec[0] = INSN_UID (insns);
6655 vec[1] = 0;
6657 return vec;
6660 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6662 static int
6663 contains (insn, vec)
6664 rtx insn;
6665 int *vec;
6667 register int i, j;
6669 if (GET_CODE (insn) == INSN
6670 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6672 int count = 0;
6673 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6674 for (j = 0; vec[j]; j++)
6675 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6676 count++;
6677 return count;
6679 else
6681 for (j = 0; vec[j]; j++)
6682 if (INSN_UID (insn) == vec[j])
6683 return 1;
6685 return 0;
6687 #endif /* HAVE_prologue || HAVE_epilogue */
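/* The record_insns/contains machinery in miniature (illustrative
   sketch only): the prologue map is a zero-terminated array of
   INSN_UIDs, and membership is a linear scan.  This relies, as the
   code above does, on UID 0 never naming a real insn.  */
#if 0
static int
uid_in_vec (int uid, const int *vec)
{
  int j;
  for (j = 0; vec[j]; j++)
    if (uid == vec[j])
      return 1;
  return 0;
}
/* With int map[] = { 7, 9, 12, 0 }:
   uid_in_vec (9, map) == 1, uid_in_vec (8, map) == 0.  */
#endif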
6689 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6690 this into place with notes indicating where the prologue ends and where
6691 the epilogue begins. Update the basic block information when possible. */
6693 void
6694 thread_prologue_and_epilogue_insns (f)
6695 rtx f ATTRIBUTE_UNUSED;
6697 int inserted = 0;
6699 prologue = 0;
6700 #ifdef HAVE_prologue
6701 if (HAVE_prologue)
6703 rtx seq;
6705 start_sequence ();
6706 seq = gen_prologue();
6707 emit_insn (seq);
6709 /* Retain a map of the prologue insns. */
6710 if (GET_CODE (seq) != SEQUENCE)
6711 seq = get_insns ();
6712 prologue = record_insns (seq);
6714 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6715 seq = gen_sequence ();
6716 end_sequence ();
6718 /* If optimization is off, and perhaps in an empty function,
6719 the entry block will have no successors. */
6720 if (ENTRY_BLOCK_PTR->succ)
6722 /* Can't deal with multiple successors of the entry block. */
6723 if (ENTRY_BLOCK_PTR->succ->succ_next)
6724 abort ();
6726 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6727 inserted = 1;
6729 else
6730 emit_insn_after (seq, f);
6732 #endif
6734 epilogue = 0;
6735 #ifdef HAVE_epilogue
6736 if (HAVE_epilogue)
6738 edge e;
6739 basic_block bb = 0;
6740 rtx tail = get_last_insn ();
6742 /* ??? This is ghastly. If function returns were not done via uses,
6743 but via mark_regs_live_at_end, we could use insert_insn_on_edge
6744 and all of this ugliness would go away. */
6746 switch (optimize)
6748 default:
6749 /* If the exit block has no non-fake predecessors, we don't
6750 need an epilogue. Furthermore, only pay attention to the
6751 fallthru predecessors; if (conditional) return insns were
6752 generated, by definition we do not need to emit epilogue
6753 insns. */
6755 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6756 if ((e->flags & EDGE_FAKE) == 0
6757 && (e->flags & EDGE_FALLTHRU) != 0)
6758 break;
6759 if (e == NULL)
6760 break;
6762 /* We can't handle multiple epilogues -- if one is needed,
6763 we won't be able to place it multiple times.
6765 ??? Fix epilogue expanders to not assume they are the
6766 last thing done compiling the function. Either that
6767 or copy_rtx each insn.
6769 ??? Blah, it's not a simple expression to assert that
6770 we have exactly one fallthru exit edge. */
6772 bb = e->src;
6773 tail = bb->end;
6775 /* ??? If the last insn of the basic block is a jump, then we
6776 are creating a new basic block. Wimp out and leave these
6777 insns outside any block. */
6778 if (GET_CODE (tail) == JUMP_INSN)
6779 bb = 0;
6781 /* FALLTHRU */
6782 case 0:
6784 rtx prev, seq, first_use;
6786 /* Move the USE insns at the end of a function onto a list. */
6787 prev = tail;
6788 if (GET_CODE (prev) == BARRIER
6789 || GET_CODE (prev) == NOTE)
6790 prev = prev_nonnote_insn (prev);
6792 first_use = 0;
6793 if (prev
6794 && GET_CODE (prev) == INSN
6795 && GET_CODE (PATTERN (prev)) == USE)
6797 /* If the end of the block is the use, grab hold of something
6798 else so that we emit barriers etc in the right place. */
6799 if (prev == tail)
6802 tail = PREV_INSN (tail);
6803 while (GET_CODE (tail) == INSN
6804 && GET_CODE (PATTERN (tail)) == USE);
6809 rtx use = prev;
6810 prev = prev_nonnote_insn (prev);
6812 remove_insn (use);
6813 if (first_use)
6815 NEXT_INSN (use) = first_use;
6816 PREV_INSN (first_use) = use;
6818 else
6819 NEXT_INSN (use) = NULL_RTX;
6820 first_use = use;
6822 while (prev
6823 && GET_CODE (prev) == INSN
6824 && GET_CODE (PATTERN (prev)) == USE);
6827 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6828 epilogue insns, the USE insns at the end of a function,
6829 the jump insn that returns, and then a BARRIER. */
6831 if (GET_CODE (tail) != BARRIER)
6833 prev = next_nonnote_insn (tail);
6834 if (!prev || GET_CODE (prev) != BARRIER)
6835 emit_barrier_after (tail);
6838 seq = gen_epilogue ();
6839 prev = tail;
6840 tail = emit_jump_insn_after (seq, tail);
6842 /* Insert the USE insns immediately before the return insn, which
6843 must be the last instruction emitted in the sequence. */
6844 if (first_use)
6845 emit_insns_before (first_use, tail);
6846 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6848 /* Update the tail of the basic block. */
6849 if (bb)
6850 bb->end = tail;
6852 /* Retain a map of the epilogue insns. */
6853 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6857 #endif
6859 if (inserted)
6860 commit_edge_insertions ();
6863 /* Reposition the prologue-end and epilogue-begin notes after instruction
6864 scheduling and delayed branch scheduling. */
6866 void
6867 reposition_prologue_and_epilogue_notes (f)
6868 rtx f ATTRIBUTE_UNUSED;
6870 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6871 /* Reposition the prologue and epilogue notes. */
6872 if (n_basic_blocks)
6874 int len;
6876 if (prologue)
6878 register rtx insn, note = 0;
6880 /* Scan from the beginning until we reach the last prologue insn.
6881 We apparently can't depend on basic_block_{head,end} after
6882 reorg has run. */
6883 for (len = 0; prologue[len]; len++)
6885 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6887 if (GET_CODE (insn) == NOTE)
6889 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6890 note = insn;
6892 else if ((len -= contains (insn, prologue)) == 0)
6894 rtx next;
6895 /* Find the prologue-end note if we haven't already, and
6896 move it to just after the last prologue insn. */
6897 if (note == 0)
6899 for (note = insn; (note = NEXT_INSN (note));)
6900 if (GET_CODE (note) == NOTE
6901 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6902 break;
6905 next = NEXT_INSN (note);
6907 /* Whether or not we can depend on BLOCK_HEAD,
6908 attempt to keep it up-to-date. */
6909 if (BLOCK_HEAD (0) == note)
6910 BLOCK_HEAD (0) = next;
6912 remove_insn (note);
6913 add_insn_after (note, insn);
6918 if (epilogue)
6920 register rtx insn, note = 0;
6922 /* Scan from the end until we reach the first epilogue insn.
6923 We apparently can't depend on basic_block_{head,end} after
6924 reorg has run. */
6925 for (len = 0; epilogue[len]; len++)
6927 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6929 if (GET_CODE (insn) == NOTE)
6931 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6932 note = insn;
6934 else if ((len -= contains (insn, epilogue)) == 0)
6936 /* Find the epilogue-begin note if we haven't already, and
6937 move it to just before the first epilogue insn. */
6938 if (note == 0)
6940 for (note = insn; (note = PREV_INSN (note));)
6941 if (GET_CODE (note) == NOTE
6942 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6943 break;
6946 /* Whether or not we can depend on BLOCK_HEAD,
6947 attempt to keep it up-to-date. */
6948 if (n_basic_blocks
6949 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6950 BLOCK_HEAD (n_basic_blocks-1) = note;
6952 remove_insn (note);
6953 add_insn_before (note, insn);
6958 #endif /* HAVE_prologue or HAVE_epilogue */