1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
22 #include <stdio.h>
24 #include "config.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "insn-config.h"
29 #include "insn-flags.h"
30 #include "expr.h"
31 #include "output.h"
32 #include "integrate.h"
33 #include "real.h"
34 #include "function.h"
35 #include "bytecode.h"
37 #include "obstack.h"
38 #define obstack_chunk_alloc xmalloc
39 #define obstack_chunk_free free
41 extern struct obstack *function_maybepermanent_obstack;
43 extern tree pushdecl ();
44 extern tree poplevel ();
46 /* Round VALUE up to the next highest integer that meets the
47 alignment ALIGN, which must be a power of two. */
48 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
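/* Example: CEIL_ROUND (13, 8) is (13 + 7) & ~7 = 16, while
   CEIL_ROUND (16, 8) remains 16; the mask trick is why ALIGN
   must be a power of two.  */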
50 /* Default max number of insns a function can have and still be inline.
51 This is overridden on RISC machines. */
52 #ifndef INTEGRATE_THRESHOLD
53 #define INTEGRATE_THRESHOLD(DECL) \
54 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
55 #endif
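/* Under the default definition above, a function taking two
   parameters may have up to 8 * (8 + 2) = 80 insns and still be
   considered for inlining.  */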
57 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
58 static void finish_inline PROTO((tree, rtx));
59 static void adjust_copied_decl_tree PROTO((tree));
60 static tree copy_decl_list PROTO((tree));
61 static tree copy_decl_tree PROTO((tree));
62 static void copy_decl_rtls PROTO((tree));
63 static void save_constants PROTO((rtx *));
64 static void note_modified_parmregs PROTO((rtx, rtx));
65 static rtx copy_for_inline PROTO((rtx));
66 static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
67 static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
68 static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
69 static void restore_constants PROTO((rtx *));
70 static void set_block_origin_self PROTO((tree));
71 static void set_decl_origin_self PROTO((tree));
72 static void set_block_abstract_flags PROTO((tree, int));
74 void set_decl_abstract_flags PROTO((tree, int));
76 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
77 is safe and reasonable to integrate into other functions.
78 Nonzero means value is a warning message with a single %s
79 for the function's name. */
81 char *
82 function_cannot_inline_p (fndecl)
83 register tree fndecl;
85 register rtx insn;
86 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
87 int max_insns = INTEGRATE_THRESHOLD (fndecl);
88 register int ninsns = 0;
89 register tree parms;
91 /* No inlines with varargs. `grokdeclarator' gives a warning
92 message about that if `inline' is specified. This code
93 is put in to catch the volunteers. */
94 if ((last && TREE_VALUE (last) != void_type_node)
95 || current_function_varargs)
96 return "varargs function cannot be inline";
98 if (current_function_calls_alloca)
99 return "function using alloca cannot be inline";
101 if (current_function_contains_functions)
102 return "function with nested functions cannot be inline";
104 /* If it's not even close, don't even look. */
105 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
106 return "function too large to be inline";
108 #if 0
109 /* Large stacks are OK now that inlined functions can share them. */
110 /* Don't inline functions with large stack usage,
111 since they can make other recursive functions burn up stack. */
112 if (!DECL_INLINE (fndecl) && get_frame_size () > 100)
113 return "function stack frame for inlining";
114 #endif
116 #if 0
117 /* Don't inline functions which do not specify a function prototype and
118 have BLKmode argument or take the address of a parameter. */
119 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
121 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
122 TREE_ADDRESSABLE (parms) = 1;
123 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
124 return "no prototype, and parameter address used; cannot be inline";
126 #endif
128 /* We can't inline functions that return structures
129 the old-fashioned PCC way, copying into a static block. */
130 if (current_function_returns_pcc_struct)
131 return "inline functions not supported for this return value type";
133 /* We can't inline functions that return BLKmode structures in registers. */
134 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
135 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
136 return "inline functions not supported for this return value type";
138 /* We can't inline functions that return structures of varying size. */
139 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
140 return "function with varying-size return value cannot be inline";
142 /* Cannot inline a function with a varying size argument or one that
143 receives a transparent union. */
144 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
146 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
147 return "function with varying-size parameter cannot be inline";
148 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
149 return "function with transparent unit parameter cannot be inline";
152 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
154 for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
155 insn = NEXT_INSN (insn))
157 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
158 ninsns++;
161 if (ninsns >= max_insns)
162 return "function too large to be inline";
165 /* We cannot inline this function if forced_labels is non-zero. This
166 implies that a label in this function was used as an initializer.
167 Because labels cannot be duplicated, all labels in the function
168 will be renamed when it is inlined. However, there is no way to find
169 and fix all variables initialized with addresses of labels in this
170 function, hence inlining is impossible. */
172 if (forced_labels)
173 return "function with label addresses used in initializers cannot inline";
175 /* We cannot inline a nested function that jumps to a nonlocal label. */
176 if (current_function_has_nonlocal_goto)
177 return "function with nonlocal goto cannot be inline";
179 return 0;
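/* A sketch of how a caller might use the result; warning_with_decl is
   the diagnostic routine assumed here for illustration (the returned
   message contains one %s for the function's name):

     char *lose = function_cannot_inline_p (fndecl);
     if (lose)
       warning_with_decl (fndecl, lose);
     else
       save_for_inline_nocopy (fndecl);
*/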
182 /* Variables used within save_for_inline. */
184 /* Mapping from old pseudo-register to new pseudo-registers.
185 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
186 It is allocated in `save_for_inline' and `expand_inline_function',
187 and deallocated on exit from each of those routines. */
188 static rtx *reg_map;
190 /* Mapping from old code-labels to new code-labels.
191 The first element of this map is label_map[min_labelno].
192 It is allocated in `save_for_inline' and `expand_inline_function',
193 and deallocated on exit from each of those routines. */
194 static rtx *label_map;
196 /* Mapping from old insn uid's to copied insns.
197 It is allocated in `save_for_inline' and `expand_inline_function',
198 and deallocated on exit from each of those routines. */
199 static rtx *insn_map;
201 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
202 Zero for a reg that isn't a parm's home.
203 Only reg numbers less than max_parm_reg are mapped here. */
204 static tree *parmdecl_map;
206 /* Keep track of first pseudo-register beyond those that are parms. */
207 static int max_parm_reg;
209 /* When an insn is being copied by copy_for_inline,
210 this is nonzero if we have copied an ASM_OPERANDS.
211 In that case, it is the original input-operand vector. */
212 static rtvec orig_asm_operands_vector;
214 /* When an insn is being copied by copy_for_inline,
215 this is nonzero if we have copied an ASM_OPERANDS.
216 In that case, it is the copied input-operand vector. */
217 static rtvec copy_asm_operands_vector;
219 /* Likewise, this is the copied constraints vector. */
220 static rtvec copy_asm_constraints_vector;
222 /* In save_for_inline, nonzero if past the parm-initialization insns. */
223 static int in_nonparm_insns;
225 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
226 needed to save FNDECL's insns and info for future inline expansion. */
228 static rtx
229 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
230 tree fndecl;
231 int min_labelno;
232 int max_labelno;
233 int max_reg;
234 int copy;
236 int function_flags, i;
237 rtvec arg_vector;
238 tree parms;
240 /* Compute the values of any flags we must restore when inlining this. */
242 function_flags
243 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
244 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
245 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
246 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
247 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
248 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
249 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
250 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
251 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
252 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
254 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
255 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
256 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
258 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
259 parms;
260 parms = TREE_CHAIN (parms), i++)
262 rtx p = DECL_RTL (parms);
264 if (GET_CODE (p) == MEM && copy)
266 /* Copy the rtl so that modifications of the addresses
267 later in compilation won't affect this arg_vector.
268 Virtual register instantiation can screw up the address
269 of the rtl. */
270 rtx new = copy_rtx (p);
272 /* Don't leave the old copy anywhere in this decl. */
273 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
274 || (GET_CODE (DECL_RTL (parms)) == MEM
275 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
276 && (XEXP (DECL_RTL (parms), 0)
277 == XEXP (DECL_INCOMING_RTL (parms), 0))))
278 DECL_INCOMING_RTL (parms) = new;
279 DECL_RTL (parms) = new;
282 RTVEC_ELT (arg_vector, i) = p;
284 if (GET_CODE (p) == REG)
285 parmdecl_map[REGNO (p)] = parms;
286 else if (GET_CODE (p) == CONCAT)
288 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
289 rtx pimag = gen_imagpart (GET_MODE (preal), p);
291 if (GET_CODE (preal) == REG)
292 parmdecl_map[REGNO (preal)] = parms;
293 if (GET_CODE (pimag) == REG)
294 parmdecl_map[REGNO (pimag)] = parms;
297 /* This flag is cleared later
298 if the function ever modifies the value of the parm. */
299 TREE_READONLY (parms) = 1;
302 /* Assume we start out in the insns that set up the parameters. */
303 in_nonparm_insns = 0;
305 /* The list of DECL_SAVED_INSNS starts off with a header which
306 contains the following information:
308 the first insn of the function (not including the insns that copy
309 parameters into registers).
310 the first parameter insn of the function,
311 the first label used by that function,
312 the last label used by that function,
313 the highest register number used for parameters,
314 the total number of registers used,
315 the size of the incoming stack area for parameters,
316 the number of bytes popped on return,
317 the stack slot list,
318 some flags that are used to restore compiler globals,
319 the value of current_function_outgoing_args_size,
320 the original argument vector,
321 and the original DECL_INITIAL. */
323 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
324 max_parm_reg, max_reg,
325 current_function_args_size,
326 current_function_pops_args,
327 stack_slot_list, function_flags,
328 current_function_outgoing_args_size,
329 arg_vector, (rtx) DECL_INITIAL (fndecl));
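/* The fields of this header are read back through accessor macros
   (FIRST_LABELNO, LAST_LABELNO, MAX_REGNUM, FUNCTION_FLAGS,
   ORIGINAL_ARG_VECTOR, and the like); expand_inline_function below
   uses them to unpack the saved information.  */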
332 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
333 things that must be done to make FNDECL expandable as an inline function.
334 HEAD contains the chain of insns to which FNDECL will expand. */
336 static void
337 finish_inline (fndecl, head)
338 tree fndecl;
339 rtx head;
341 NEXT_INSN (head) = get_first_nonparm_insn ();
342 FIRST_PARM_INSN (head) = get_insns ();
343 DECL_SAVED_INSNS (fndecl) = head;
344 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
345 DECL_INLINE (fndecl) = 1;
348 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
349 they all point to the new (copied) rtxs. */
351 static void
352 adjust_copied_decl_tree (block)
353 register tree block;
355 register tree subblock;
356 register rtx original_end;
358 original_end = BLOCK_END_NOTE (block);
359 if (original_end)
361 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
362 NOTE_SOURCE_FILE (original_end) = 0;
365 /* Process all subblocks. */
366 for (subblock = BLOCK_SUBBLOCKS (block);
367 subblock;
368 subblock = TREE_CHAIN (subblock))
369 adjust_copied_decl_tree (subblock);
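/* This works hand in hand with the copying loop in
   save_for_inline_copying, which stashes the address of each copied
   BLOCK_END note in the NOTE_SOURCE_FILE field of the original note;
   here we read the stashed pointer back and clear it.  */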
372 /* Make the insns and PARM_DECLs of the current function permanent
373 and record other information in DECL_SAVED_INSNS to allow inlining
374 of this function in subsequent calls.
376 This function is called when we are going to immediately compile
377 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
378 modified by the compilation process, so we copy all of them to
379 new storage and consider the new insns to be the insn chain to be
380 compiled. Our caller (rest_of_compilation) saves the original
381 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
383 /* ??? The nonlocal_label list should be adjusted also. However, since
384 a function that contains a nested function never gets inlined currently,
385 the nonlocal_label list will always be empty, so we don't worry about
386 it for now. */
388 void
389 save_for_inline_copying (fndecl)
390 tree fndecl;
392 rtx first_insn, last_insn, insn;
393 rtx head, copy;
394 int max_labelno, min_labelno, i, len;
395 int max_reg;
396 int max_uid;
397 rtx first_nonparm_insn;
399 /* Make and emit a return-label if we have not already done so.
400 Do this before recording the bounds on label numbers. */
402 if (return_label == 0)
404 return_label = gen_label_rtx ();
405 emit_label (return_label);
408 /* Get some bounds on the labels and registers used. */
410 max_labelno = max_label_num ();
411 min_labelno = get_first_label_num ();
412 max_reg = max_reg_num ();
414 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
415 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
416 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
417 for the parms, prior to elimination of virtual registers.
418 These values are needed for substituting parms properly. */
420 max_parm_reg = max_parm_reg_num ();
421 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
423 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
425 if (current_function_uses_const_pool)
427 /* Replace any constant pool references with the actual constant. We
428 will put the constants back in the copy made below. */
429 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
430 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
432 save_constants (&PATTERN (insn));
433 if (REG_NOTES (insn))
434 save_constants (&REG_NOTES (insn));
437 /* Clear out the constant pool so that we can recreate it with the
438 copied constants below. */
439 init_const_rtx_hash_table ();
440 clear_const_double_mem ();
443 max_uid = INSN_UID (head);
445 /* We have now allocated all that needs to be allocated permanently
446 on the rtx obstack. Set our high-water mark, so that we
447 can free the rest of this when the time comes. */
449 preserve_data ();
451 /* Copy the chain of insns of this function.
452 Install the copied chain as the insns of this function,
453 for continued compilation;
454 the original chain is recorded as the DECL_SAVED_INSNS
455 for inlining future calls. */
457 /* If there are insns that copy parms from the stack into pseudo registers,
458 those insns are not copied. `expand_inline_function' must
459 emit the correct code to handle such things. */
461 insn = get_insns ();
462 if (GET_CODE (insn) != NOTE)
463 abort ();
464 first_insn = rtx_alloc (NOTE);
465 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
466 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
467 INSN_UID (first_insn) = INSN_UID (insn);
468 PREV_INSN (first_insn) = NULL;
469 NEXT_INSN (first_insn) = NULL;
470 last_insn = first_insn;
472 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
473 Make these new rtx's now, and install them in regno_reg_rtx, so they
474 will be the official pseudo-reg rtx's for the rest of compilation. */
476 reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
478 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
479 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
480 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
481 regno_reg_rtx[i], len);
483 bcopy ((char *) (reg_map + LAST_VIRTUAL_REGISTER + 1),
484 (char *) (regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1),
485 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
487 /* Likewise each label rtx must have a unique rtx as its copy. */
489 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
490 label_map -= min_labelno;
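/* Offsetting label_map by min_labelno lets it be indexed directly by
   CODE_LABEL_NUMBER even though only max_labelno - min_labelno
   entries were allocated.  */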
492 for (i = min_labelno; i < max_labelno; i++)
493 label_map[i] = gen_label_rtx ();
495 /* Record the mapping of old insns to copied insns. */
497 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
498 bzero ((char *) insn_map, max_uid * sizeof (rtx));
500 /* Get the insn which signals the end of parameter setup code. */
501 first_nonparm_insn = get_first_nonparm_insn ();
503 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
504 (the former occurs when a variable has its address taken)
505 since these may be shared and can be changed by virtual
506 register instantiation. DECL_RTL values for our arguments
507 have already been copied by initialize_for_inline. */
508 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
509 if (GET_CODE (regno_reg_rtx[i]) == MEM)
510 XEXP (regno_reg_rtx[i], 0)
511 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
513 /* Copy the tree of subblocks of the function, and the decls in them.
514 We will use the copy for compiling this function, then restore the original
515 subblocks and decls for use when inlining this function.
517 Several parts of the compiler modify BLOCK trees. In particular,
518 instantiate_virtual_regs will instantiate any virtual regs
519 mentioned in the DECL_RTLs of the decls, and loop
520 unrolling will replicate any BLOCK trees inside an unrolled loop.
522 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
523 which we will use for inlining. The rtl might even contain pseudoregs
524 whose space has been freed. */
526 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
527 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
529 /* Now copy each DECL_RTL which is a MEM,
530 so it is safe to modify their addresses. */
531 copy_decl_rtls (DECL_INITIAL (fndecl));
533 /* The fndecl node acts as its own progenitor, so mark it as such. */
534 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
536 /* Now copy the chain of insns.  Do this twice: the first time copy the
537 insn itself and its body; the second time copy the REG_NOTES.  This is
538 because a REG_NOTE may have a forward pointer to another insn. */
540 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
542 orig_asm_operands_vector = 0;
544 if (insn == first_nonparm_insn)
545 in_nonparm_insns = 1;
547 switch (GET_CODE (insn))
549 case NOTE:
550 /* No need to keep these. */
551 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
552 continue;
554 copy = rtx_alloc (NOTE);
555 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
556 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
557 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
558 else
560 NOTE_SOURCE_FILE (insn) = (char *) copy;
561 NOTE_SOURCE_FILE (copy) = 0;
563 break;
565 case INSN:
566 case JUMP_INSN:
567 case CALL_INSN:
568 copy = rtx_alloc (GET_CODE (insn));
570 if (GET_CODE (insn) == CALL_INSN)
571 CALL_INSN_FUNCTION_USAGE (copy) =
572 copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
574 PATTERN (copy) = copy_for_inline (PATTERN (insn));
575 INSN_CODE (copy) = -1;
576 LOG_LINKS (copy) = NULL_RTX;
577 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
578 break;
580 case CODE_LABEL:
581 copy = label_map[CODE_LABEL_NUMBER (insn)];
582 LABEL_NAME (copy) = LABEL_NAME (insn);
583 break;
585 case BARRIER:
586 copy = rtx_alloc (BARRIER);
587 break;
589 default:
590 abort ();
592 INSN_UID (copy) = INSN_UID (insn);
593 insn_map[INSN_UID (insn)] = copy;
594 NEXT_INSN (last_insn) = copy;
595 PREV_INSN (copy) = last_insn;
596 last_insn = copy;
599 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
601 /* Now copy the REG_NOTES. */
602 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
603 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
604 && insn_map[INSN_UID(insn)])
605 REG_NOTES (insn_map[INSN_UID (insn)])
606 = copy_for_inline (REG_NOTES (insn));
608 NEXT_INSN (last_insn) = NULL;
610 finish_inline (fndecl, head);
612 set_new_first_and_last_insn (first_insn, last_insn);
615 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
616 For example, this can copy a list made of TREE_LIST nodes. While copying,
617 for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
618 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
619 point to the corresponding (abstract) original node. */
621 static tree
622 copy_decl_list (list)
623 tree list;
625 tree head;
626 register tree prev, next;
628 if (list == 0)
629 return 0;
631 head = prev = copy_node (list);
632 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
633 DECL_ABSTRACT_ORIGIN (head) = list;
634 next = TREE_CHAIN (list);
635 while (next)
637 register tree copy;
639 copy = copy_node (next);
640 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
641 DECL_ABSTRACT_ORIGIN (copy) = next;
642 TREE_CHAIN (prev) = copy;
643 prev = copy;
644 next = TREE_CHAIN (next);
646 return head;
649 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
651 static tree
652 copy_decl_tree (block)
653 tree block;
655 tree t, vars, subblocks;
657 vars = copy_decl_list (BLOCK_VARS (block));
658 subblocks = 0;
660 /* Process all subblocks. */
661 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
663 tree copy = copy_decl_tree (t);
664 TREE_CHAIN (copy) = subblocks;
665 subblocks = copy;
668 t = copy_node (block);
669 BLOCK_VARS (t) = vars;
670 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
671 /* If the BLOCK being cloned is already marked as having been instantiated
672 from something else, then leave that `origin' marking alone. Elsewise,
673 mark the clone as having originated from the BLOCK we are cloning. */
674 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
675 BLOCK_ABSTRACT_ORIGIN (t) = block;
676 return t;
679 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
681 static void
682 copy_decl_rtls (block)
683 tree block;
685 tree t;
687 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
688 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
689 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
691 /* Process all subblocks. */
692 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
693 copy_decl_rtls (t);
696 /* Make the insns and PARM_DECLs of the current function permanent
697 and record other information in DECL_SAVED_INSNS to allow inlining
698 of this function in subsequent calls.
700 This routine need not copy any insns because we are not going
701 to immediately compile the insns in the insn chain. There
702 are two cases when we would compile the insns for FNDECL:
703 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
704 be output at the end of other compilation, because somebody took
705 its address. In the first case, the insns of FNDECL are copied
706 as it is expanded inline, so FNDECL's saved insns are not
707 modified. In the second case, FNDECL is used for the last time,
708 so modifying the rtl is not a problem.
710 ??? Actually, we do not verify that FNDECL is not inline expanded
711 by other functions which must also be written down at the end
712 of compilation. We could set flag_no_inline to nonzero when
713 the time comes to write down such functions. */
715 void
716 save_for_inline_nocopy (fndecl)
717 tree fndecl;
719 rtx insn;
720 rtx head;
721 rtx first_nonparm_insn;
723 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
724 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
725 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
726 for the parms, prior to elimination of virtual registers.
727 These values are needed for substituting parms properly. */
729 max_parm_reg = max_parm_reg_num ();
730 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
732 /* Make and emit a return-label if we have not already done so. */
734 if (return_label == 0)
736 return_label = gen_label_rtx ();
737 emit_label (return_label);
740 head = initialize_for_inline (fndecl, get_first_label_num (),
741 max_label_num (), max_reg_num (), 0);
743 /* If there are insns that copy parms from the stack into pseudo registers,
744 those insns are not copied. `expand_inline_function' must
745 emit the correct code to handle such things. */
747 insn = get_insns ();
748 if (GET_CODE (insn) != NOTE)
749 abort ();
751 /* Get the insn which signals the end of parameter setup code. */
752 first_nonparm_insn = get_first_nonparm_insn ();
754 /* Now just scan the chain of insns to see what happens to our
755 PARM_DECLs. If a PARM_DECL is used but never modified, we
756 can substitute its rtl directly when expanding inline (and
757 perform constant folding when its incoming value is constant).
758 Otherwise, we have to copy its value into a new register and track
759 the new register's life. */
761 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
763 if (insn == first_nonparm_insn)
764 in_nonparm_insns = 1;
766 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
768 if (current_function_uses_const_pool)
770 /* Replace any constant pool references with the actual constant.
771 We will put the constant back if we need to write the
772 function out after all. */
773 save_constants (&PATTERN (insn));
774 if (REG_NOTES (insn))
775 save_constants (&REG_NOTES (insn));
778 /* Record what interesting things happen to our parameters. */
779 note_stores (PATTERN (insn), note_modified_parmregs);
783 /* We have now allocated all that needs to be allocated permanently
784 on the rtx obstack. Set our high-water mark, so that we
785 can free the rest of this when the time comes. */
787 preserve_data ();
789 finish_inline (fndecl, head);
792 /* Given PX, a pointer into an insn, search for references to the constant
793 pool. Replace each with a CONST that has the mode of the original
794 constant, contains the constant, and has RTX_INTEGRATED_P set.
795 Similarly, constant pool addresses not enclosed in a MEM are replaced
796 with an ADDRESS rtx which also gives the constant, mode, and has
797 RTX_INTEGRATED_P set. */
799 static void
800 save_constants (px)
801 rtx *px;
803 rtx x;
804 int i, j;
806 again:
807 x = *px;
809 /* If this is a CONST_DOUBLE, don't try to fix things up in
810 CONST_DOUBLE_MEM, because that would recurse infinitely. */
811 if (GET_CODE (x) == CONST_DOUBLE)
812 return;
813 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
814 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
816 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
817 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
818 RTX_INTEGRATED_P (new) = 1;
820 /* If the MEM was in a different mode than the constant (perhaps we
821 were only looking at the low-order part), surround it with a
822 SUBREG so we can save both modes. */
824 if (GET_MODE (x) != const_mode)
826 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
827 RTX_INTEGRATED_P (new) = 1;
830 *px = new;
831 save_constants (&XEXP (*px, 0));
833 else if (GET_CODE (x) == SYMBOL_REF
834 && CONSTANT_POOL_ADDRESS_P (x))
836 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
837 save_constants (&XEXP (*px, 0));
838 RTX_INTEGRATED_P (*px) = 1;
841 else
843 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
844 int len = GET_RTX_LENGTH (GET_CODE (x));
846 for (i = len-1; i >= 0; i--)
848 switch (fmt[i])
850 case 'E':
851 for (j = 0; j < XVECLEN (x, i); j++)
852 save_constants (&XVECEXP (x, i, j));
853 break;
855 case 'e':
856 if (XEXP (x, i) == 0)
857 continue;
858 if (i == 0)
860 /* Hack tail-recursion here. */
861 px = &XEXP (x, 0);
862 goto again;
864 save_constants (&XEXP (x, i));
865 break;
871 /* Note whether a parameter is modified or not. */
873 static void
874 note_modified_parmregs (reg, x)
875 rtx reg;
876 rtx x;
878 if (GET_CODE (reg) == REG && in_nonparm_insns
879 && REGNO (reg) < max_parm_reg
880 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
881 && parmdecl_map[REGNO (reg)] != 0)
882 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
885 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
886 according to `reg_map' and `label_map'. The original rtl insns
887 will be saved for inlining; this is used to make a copy
888 which is used to finish compiling the inline function itself.
890 If we find a "saved" constant pool entry, one which was replaced with
891 the value of the constant, convert it back to a constant pool entry.
892 Since the pool wasn't touched, this should simply restore the old
893 address.
895 All other kinds of rtx are copied except those that can never be
896 changed during compilation. */
898 static rtx
899 copy_for_inline (orig)
900 rtx orig;
902 register rtx x = orig;
903 register int i;
904 register enum rtx_code code;
905 register char *format_ptr;
907 if (x == 0)
908 return x;
910 code = GET_CODE (x);
912 /* These types may be freely shared. */
914 switch (code)
916 case QUEUED:
917 case CONST_INT:
918 case SYMBOL_REF:
919 case PC:
920 case CC0:
921 return x;
923 case CONST_DOUBLE:
924 /* We have to make a new CONST_DOUBLE to ensure that we account for
925 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
926 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
928 REAL_VALUE_TYPE d;
930 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
931 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
933 else
934 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
935 VOIDmode);
937 case CONST:
938 /* Get constant pool entry for constant in the pool. */
939 if (RTX_INTEGRATED_P (x))
940 return validize_mem (force_const_mem (GET_MODE (x),
941 copy_for_inline (XEXP (x, 0))));
942 break;
944 case SUBREG:
945 /* Get constant pool entry, but access in different mode. */
946 if (RTX_INTEGRATED_P (x))
948 rtx new
949 = force_const_mem (GET_MODE (SUBREG_REG (x)),
950 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
952 PUT_MODE (new, GET_MODE (x));
953 return validize_mem (new);
955 break;
957 case ADDRESS:
958 /* An ADDRESS that is not our constant pool marker is an error.
959 Else get the constant pool address. */
960 if (! RTX_INTEGRATED_P (x))
961 abort ();
963 return XEXP (force_const_mem (GET_MODE (x),
964 copy_for_inline (XEXP (x, 0))), 0);
966 case ASM_OPERANDS:
967 /* If a single asm insn contains multiple output operands
968 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
969 We must make sure that the copied insn continues to share it. */
970 if (orig_asm_operands_vector == XVEC (orig, 3))
972 x = rtx_alloc (ASM_OPERANDS);
973 x->volatil = orig->volatil;
974 XSTR (x, 0) = XSTR (orig, 0);
975 XSTR (x, 1) = XSTR (orig, 1);
976 XINT (x, 2) = XINT (orig, 2);
977 XVEC (x, 3) = copy_asm_operands_vector;
978 XVEC (x, 4) = copy_asm_constraints_vector;
979 XSTR (x, 5) = XSTR (orig, 5);
980 XINT (x, 6) = XINT (orig, 6);
981 return x;
983 break;
985 case MEM:
986 /* A MEM is usually allowed to be shared if its address is constant
987 or is a constant plus one of the special registers.
989 We do not allow sharing of addresses that are either a special
990 register or the sum of a constant and a special register because
991 it is possible for unshare_all_rtl to copy the address, into memory
992 that won't be saved. Although the MEM can safely be shared, and
993 won't be copied there, the address itself cannot be shared, and may
994 need to be copied.
996 There are also two exceptions with constants: The first is if the
997 constant is a LABEL_REF or the sum of the LABEL_REF
998 and an integer. This case can happen if we have an inline
999 function that supplies a constant operand to the call of another
1000 inline function that uses it in a switch statement. In this case,
1001 we will be replacing the LABEL_REF, so we have to replace this MEM
1002 as well.
1004 The second case is if we have a (const (plus (address ..) ...)).
1005 In that case we need to put back the address of the constant pool
1006 entry. */
1008 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1009 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1010 && ! (GET_CODE (XEXP (x, 0)) == CONST
1011 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1012 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1013 == LABEL_REF)
1014 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1015 == ADDRESS)))))
1016 return x;
1017 break;
1019 case LABEL_REF:
1020 /* If this is a non-local label, just make a new LABEL_REF.
1021 Otherwise, use the new label as well. */
1022 x = gen_rtx (LABEL_REF, GET_MODE (orig),
1023 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1024 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1025 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1026 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1027 return x;
1029 case REG:
1030 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1031 return reg_map [REGNO (x)];
1032 else
1033 return x;
1035 case SET:
1036 /* If a parm that gets modified lives in a pseudo-reg,
1037 clear its TREE_READONLY to prevent certain optimizations. */
1039 rtx dest = SET_DEST (x);
1041 while (GET_CODE (dest) == STRICT_LOW_PART
1042 || GET_CODE (dest) == ZERO_EXTRACT
1043 || GET_CODE (dest) == SUBREG)
1044 dest = XEXP (dest, 0);
1046 if (GET_CODE (dest) == REG
1047 && REGNO (dest) < max_parm_reg
1048 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1049 && parmdecl_map[REGNO (dest)] != 0
1050 /* The insn to load an arg pseudo from a stack slot
1051 does not count as modifying it. */
1052 && in_nonparm_insns)
1053 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1055 break;
1057 #if 0 /* This is a good idea, but here is the wrong place for it. */
1058 /* Arrange that CONST_INTs always appear as the second operand
1059 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1060 always appear as the first. */
1061 case PLUS:
1062 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1063 || (XEXP (x, 1) == frame_pointer_rtx
1064 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1065 && XEXP (x, 1) == arg_pointer_rtx)))
1067 rtx t = XEXP (x, 0);
1068 XEXP (x, 0) = XEXP (x, 1);
1069 XEXP (x, 1) = t;
1071 break;
1072 #endif
1075 /* Replace this rtx with a copy of itself. */
1077 x = rtx_alloc (code);
1078 bcopy ((char *) orig, (char *) x,
1079 (sizeof (*x) - sizeof (x->fld)
1080 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
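/* The size computed above works because struct rtx_def declares fld
   as a one-element array: its size is subtracted, then space for the
   code's true operand count is added back.  */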
1082 /* Now scan the subexpressions recursively.
1083 We can store any replaced subexpressions directly into X
1084 since we know X is not shared! Any vectors in X
1085 must be copied if X was copied. */
1087 format_ptr = GET_RTX_FORMAT (code);
1089 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1091 switch (*format_ptr++)
1093 case 'e':
1094 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1095 break;
1097 case 'u':
1098 /* Change any references to old-insns to point to the
1099 corresponding copied insns. */
1100 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1101 break;
1103 case 'E':
1104 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1106 register int j;
1108 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
1109 for (j = 0; j < XVECLEN (x, i); j++)
1110 XVECEXP (x, i, j)
1111 = copy_for_inline (XVECEXP (x, i, j));
1113 break;
1117 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1119 orig_asm_operands_vector = XVEC (orig, 3);
1120 copy_asm_operands_vector = XVEC (x, 3);
1121 copy_asm_constraints_vector = XVEC (x, 4);
1124 return x;
1127 /* Unfortunately, we need a global copy of const_equiv map for communication
1128 with a function called from note_stores. Be *very* careful that this
1129 is used properly in the presence of recursion. */
1131 rtx *global_const_equiv_map;
1132 int global_const_equiv_map_size;
1134 #define FIXED_BASE_PLUS_P(X) \
1135 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1136 && GET_CODE (XEXP (X, 0)) == REG \
1137 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1138 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
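/* FIXED_BASE_PLUS_P thus matches addresses such as

     (plus (reg virtual-stack-vars) (const_int 8))

   i.e. a constant offset from a virtual register, which is as good
   as a constant for const_equiv_map purposes.  */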
1140 /* Integrate the procedure defined by FNDECL. Note that this function
1141 may wind up calling itself. Since the static variables are not
1142 reentrant, we do not assign them until after the possibility
1143 of recursion is eliminated.
1145 If IGNORE is nonzero, do not produce a value.
1146 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1148 Value is:
1149 (rtx)-1 if we could not substitute the function
1150 0 if we substituted it and it does not produce a value
1151 else an rtx for where the value is stored. */
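/* A sketch of the caller's side (expand_call in calls.c is the real
   caller; the variable names here are illustrative):

     temp = expand_inline_function (fndecl, actparms, target,
                                    ignore, type, structure_value_addr);
     if (temp == (rtx) (HOST_WIDE_INT) -1)
       ... emit an ordinary CALL_INSN instead ...
*/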
1153 rtx
1154 expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
1155 tree fndecl, parms;
1156 rtx target;
1157 int ignore;
1158 tree type;
1159 rtx structure_value_addr;
1161 tree formal, actual, block;
1162 rtx header = DECL_SAVED_INSNS (fndecl);
1163 rtx insns = FIRST_FUNCTION_INSN (header);
1164 rtx parm_insns = FIRST_PARM_INSN (header);
1165 tree *arg_trees;
1166 rtx *arg_vals;
1167 rtx insn;
1168 int max_regno;
1169 register int i;
1170 int min_labelno = FIRST_LABELNO (header);
1171 int max_labelno = LAST_LABELNO (header);
1172 int nargs;
1173 rtx local_return_label = 0;
1174 rtx loc;
1175 rtx stack_save = 0;
1176 rtx temp;
1177 struct inline_remap *map;
1178 rtx cc0_insn = 0;
1179 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1180 rtx static_chain_value = 0;
1182 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1183 max_regno = MAX_REGNUM (header) + 3;
1184 if (max_regno < FIRST_PSEUDO_REGISTER)
1185 abort ();
1187 nargs = list_length (DECL_ARGUMENTS (fndecl));
1189 /* Check that the parm types match and that sufficient arguments were
1190 passed. Since the appropriate conversions or default promotions have
1191 already been applied, the machine modes should match exactly. */
1193 for (formal = DECL_ARGUMENTS (fndecl),
1194 actual = parms;
1195 formal;
1196 formal = TREE_CHAIN (formal),
1197 actual = TREE_CHAIN (actual))
1199 tree arg;
1200 enum machine_mode mode;
1202 if (actual == 0)
1203 return (rtx) (HOST_WIDE_INT) -1;
1205 arg = TREE_VALUE (actual);
1206 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1208 if (mode != TYPE_MODE (TREE_TYPE (arg))
1209 /* If they are block mode, the types should match exactly.
1210 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1211 which could happen if the parameter has incomplete type. */
1212 || (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)))
1213 return (rtx) (HOST_WIDE_INT) -1;
1216 /* Extra arguments are valid, but will be ignored below, so we must
1217 evaluate them here for side-effects. */
1218 for (; actual; actual = TREE_CHAIN (actual))
1219 expand_expr (TREE_VALUE (actual), const0_rtx,
1220 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1222 /* Make a binding contour to keep inline cleanups called at
1223 outer function-scope level from looking like they are shadowing
1224 parameter declarations. */
1225 pushlevel (0);
1227 /* Make a fresh binding contour that we can easily remove. */
1228 pushlevel (0);
1229 expand_start_bindings (0);
1230 if (GET_CODE (parm_insns) == NOTE
1231 && NOTE_LINE_NUMBER (parm_insns) > 0)
1233 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1234 NOTE_LINE_NUMBER (parm_insns));
1235 if (note)
1236 RTX_INTEGRATED_P (note) = 1;
1239 /* Expand the function arguments. Do this first so that any
1240 new registers get created before we allocate the maps. */
1242 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1243 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1245 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1246 formal;
1247 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1249 /* Actual parameter, converted to the type of the argument within the
1250 function. */
1251 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1252 /* Mode of the variable used within the function. */
1253 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1254 int invisiref = 0;
1256 /* Make sure this formal has some correspondence in the user's code
1257 before emitting any line notes for it. */
1258 if (DECL_SOURCE_LINE (formal))
1260 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1261 DECL_SOURCE_LINE (formal));
1262 if (note)
1263 RTX_INTEGRATED_P (note) = 1;
1266 arg_trees[i] = arg;
1267 loc = RTVEC_ELT (arg_vector, i);
1269 /* If this is an object passed by invisible reference, we copy the
1270 object into a stack slot and save its address. If this will go
1271 into memory, we do nothing now. Otherwise, we just expand the
1272 argument. */
1273 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1274 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1276 rtx stack_slot
1277 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1278 int_size_in_bytes (TREE_TYPE (arg)), 1);
1279 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1281 store_expr (arg, stack_slot, 0);
1283 arg_vals[i] = XEXP (stack_slot, 0);
1284 invisiref = 1;
1286 else if (GET_CODE (loc) != MEM)
1288 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1289 /* The mode of LOC and ARG can differ if LOC was a variable
1290 that had its mode promoted via PROMOTED_MODE. */
1291 arg_vals[i] = convert_modes (GET_MODE (loc),
1292 TYPE_MODE (TREE_TYPE (arg)),
1293 expand_expr (arg, NULL_RTX, mode,
1294 EXPAND_SUM),
1295 TREE_UNSIGNED (TREE_TYPE (formal)));
1296 else
1297 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1299 else
1300 arg_vals[i] = 0;
1302 if (arg_vals[i] != 0
1303 && (! TREE_READONLY (formal)
1304 /* If the parameter is not read-only, copy our argument through
1305 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1306 TARGET in any way. In the inline function, they will likely
1307 be two different pseudos, and `safe_from_p' will make all
1308 sorts of smart assumptions about their not conflicting.
1309 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1310 wrong, so put ARG_VALS[I] into a fresh register.
1311 Don't worry about invisible references, since their stack
1312 temps will never overlap the target. */
1313 || (target != 0
1314 && ! invisiref
1315 && (GET_CODE (arg_vals[i]) == REG
1316 || GET_CODE (arg_vals[i]) == SUBREG
1317 || GET_CODE (arg_vals[i]) == MEM)
1318 && reg_overlap_mentioned_p (arg_vals[i], target))
1319 /* ??? We must always copy a SUBREG into a REG, because it might
1320 get substituted into an address, and not all ports correctly
1321 handle SUBREGs in addresses. */
1322 || (GET_CODE (arg_vals[i]) == SUBREG)))
1323 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1326 /* Allocate the structures we use to remap things. */
1328 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1329 map->fndecl = fndecl;
1331 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1332 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1334 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1335 map->label_map -= min_labelno;
1337 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1338 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1339 map->min_insnno = 0;
1340 map->max_insnno = INSN_UID (header);
1342 map->integrating = 1;
1344 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1345 be large enough for all our pseudos. This is the number we are currently
1346 using plus the number in the called routine, plus 15 for each arg,
1347 five to compute the virtual frame pointer, and five for the return value.
1348 This should be enough for most cases. We do not reference entries
1349 outside the range of the map.
1351 ??? These numbers are quite arbitrary and were obtained by
1352 experimentation. At some point, we should try to allocate the
1353 table after all the parameters are set up so we can more accurately
1354 estimate the number of pseudos we will need. */
1356 map->const_equiv_map_size
1357 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
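/* For instance, with max_reg_num () == 200, a called routine using 80
   pseudos beyond FIRST_PSEUDO_REGISTER, and nargs == 2, the table
   gets 200 + 80 + 30 + 10 = 320 entries; the final 10 is the five
   for the virtual frame pointer plus five for the return value.  */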
1359 map->const_equiv_map
1360 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1361 bzero ((char *) map->const_equiv_map,
1362 map->const_equiv_map_size * sizeof (rtx));
1364 map->const_age_map
1365 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1366 bzero ((char *) map->const_age_map,
1367 map->const_equiv_map_size * sizeof (unsigned));
1368 map->const_age = 0;
1370 /* Record the current insn in case we have to set up pointers to frame
1371 and argument memory blocks. */
1372 map->insns_at_start = get_last_insn ();
1374 /* Update the outgoing argument size to allow for those in the inlined
1375 function. */
1376 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1377 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1379 /* If the inline function needs to make PIC references, that means
1380 that this function's PIC offset table must be used. */
1381 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1382 current_function_uses_pic_offset_table = 1;
1384 /* If this function needs a context, set it up. */
1385 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1386 static_chain_value = lookup_static_chain (fndecl);
1388 /* Process each argument. For each, set up things so that the function's
1389 reference to the argument will refer to the argument being passed.
1390 We only replace REG with REG here. Any simplifications are done
1391 via const_equiv_map.
1393 We make two passes: In the first, we deal with parameters that will
1394 be placed into registers, since we need to ensure that the allocated
1395 register number fits in const_equiv_map. Then we store all non-register
1396 parameters into their memory location. */
1398 /* Don't try to free temp stack slots here, because we may put one of the
1399 parameters into a temp stack slot. */
1401 for (i = 0; i < nargs; i++)
1403 rtx copy = arg_vals[i];
1405 loc = RTVEC_ELT (arg_vector, i);
1407 /* There are four cases, each handled separately. */
1408 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1409 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1411 /* This must be an object passed by invisible reference (it could
1412 also be a variable-sized object, but we forbid inlining functions
1413 with variable-sized arguments). COPY is the address of the
1414 actual value (this computation will cause it to be copied). We
1415 map that address for the register, noting the actual address as
1416 an equivalent in case it can be substituted into the insns. */
1418 if (GET_CODE (copy) != REG)
1420 temp = copy_addr_to_reg (copy);
1421 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1422 && REGNO (temp) < map->const_equiv_map_size)
1424 map->const_equiv_map[REGNO (temp)] = copy;
1425 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1427 copy = temp;
1429 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1431 else if (GET_CODE (loc) == MEM)
1433 /* This is the case of a parameter that lives in memory.
1434 It will live in the block we allocate in the called routine's
1435 frame that simulates the incoming argument area. Do nothing
1436 now; we will call store_expr later. */
1439 else if (GET_CODE (loc) == REG)
1441 /* This is the good case where the parameter is in a register.
1442 If it is read-only and our argument is a constant, set up the
1443 constant equivalence.
1445 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1446 that flag set if it is a register.
1448 Also, don't allow hard registers here; they might not be valid
1449 when substituted into insns. */
1451 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1452 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1453 && ! REG_USERVAR_P (copy))
1454 || (GET_CODE (copy) == REG
1455 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1457 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1458 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1459 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1460 && REGNO (temp) < map->const_equiv_map_size)
1462 map->const_equiv_map[REGNO (temp)] = copy;
1463 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1465 copy = temp;
1467 map->reg_map[REGNO (loc)] = copy;
1469 else if (GET_CODE (loc) == CONCAT)
1471 /* This is the good case where the parameter is in a
1472 pair of separate pseudos.
1473 If it is read-only and our argument is a constant, set up the
1474 constant equivalence.
1476 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1477 that flag set if it is a register.
1479 Also, don't allow hard registers here; they might not be valid
1480 when substituted into insns. */
1481 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1482 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1483 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1484 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1486 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1487 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1488 && ! REG_USERVAR_P (copyreal))
1489 || (GET_CODE (copyreal) == REG
1490 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1492 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1493 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1494 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1495 && REGNO (temp) < map->const_equiv_map_size)
1497 map->const_equiv_map[REGNO (temp)] = copyreal;
1498 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1500 copyreal = temp;
1502 map->reg_map[REGNO (locreal)] = copyreal;
1504 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1505 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1506 && ! REG_USERVAR_P (copyimag))
1507 || (GET_CODE (copyimag) == REG
1508 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1510 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1511 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1512 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1513 && REGNO (temp) < map->const_equiv_map_size)
1515 map->const_equiv_map[REGNO (temp)] = copyimag;
1516 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1518 copyimag = temp;
1520 map->reg_map[REGNO (locimag)] = copyimag;
1522 else
1523 abort ();
1526 /* Now do the parameters that will be placed in memory. */
1528 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1529 formal; formal = TREE_CHAIN (formal), i++)
1531 loc = RTVEC_ELT (arg_vector, i);
1533 if (GET_CODE (loc) == MEM
1534 /* Exclude case handled above. */
1535 && ! (GET_CODE (XEXP (loc, 0)) == REG
1536 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1538 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1539 DECL_SOURCE_LINE (formal));
1540 if (note)
1541 RTX_INTEGRATED_P (note) = 1;
1543 /* Compute the address in the area we reserved and store the
1544 value there. */
1545 temp = copy_rtx_and_substitute (loc, map);
1546 subst_constants (&temp, NULL_RTX, map);
1547 apply_change_group ();
1548 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1549 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1550 store_expr (arg_trees[i], temp, 0);
1554 /* Deal with the places that the function puts its result.
1555 We are driven by what is placed into DECL_RESULT.
1557 Initially, we assume that we don't need any special handling for
1558 REG_FUNCTION_VALUE_P. */
1560 map->inline_target = 0;
1561 loc = DECL_RTL (DECL_RESULT (fndecl));
1562 if (TYPE_MODE (type) == VOIDmode)
1563 /* There is no return value to worry about. */
1564 ;
1565 else if (GET_CODE (loc) == MEM)
1567 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1568 abort ();
1570 /* Pass the function the address in which to return a structure value.
1571 Note that a constructor can cause someone to call us with
1572 STRUCTURE_VALUE_ADDR, but the initialization takes place
1573 via the first parameter, rather than the struct return address.
1575 We have two cases: If the address is a simple register indirect,
1576 use the mapping mechanism to point that register to our structure
1577 return address. Otherwise, store the structure return value into
1578 the place that it will be referenced from. */
1580 if (GET_CODE (XEXP (loc, 0)) == REG)
1582 temp = force_reg (Pmode, structure_value_addr);
1583 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1584 if ((CONSTANT_P (structure_value_addr)
1585 || (GET_CODE (structure_value_addr) == PLUS
1586 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1587 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1588 && REGNO (temp) < map->const_equiv_map_size)
1590 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1591 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1594 else
1596 temp = copy_rtx_and_substitute (loc, map);
1597 subst_constants (&temp, NULL_RTX, map);
1598 apply_change_group ();
1599 emit_move_insn (temp, structure_value_addr);
1602 else if (ignore)
1603 /* We will ignore the result value, so don't look at its structure.
1604 Note that preparations for an aggregate return value
1605 do need to be made (above) even if it will be ignored. */
1607 else if (GET_CODE (loc) == REG)
1609 /* The function returns an object in a register and we use the return
1610 value. Set up our target for remapping. */
1612 /* Machine mode the function was declared to return. */
1613 enum machine_mode departing_mode = TYPE_MODE (type);
1614 /* (Possibly wider) machine mode it actually computes
1615 (for the sake of callers that fail to declare it right). */
1616 enum machine_mode arriving_mode
1617 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1618 rtx reg_to_map;
1620 /* Don't use MEMs as direct targets because on some machines
1621 substituting a MEM for a REG makes invalid insns.
1622 Let the combiner substitute the MEM if that is valid. */
1623 if (target == 0 || GET_CODE (target) != REG
1624 || GET_MODE (target) != departing_mode)
1625 target = gen_reg_rtx (departing_mode);
1627 /* If function's value was promoted before return,
1628 avoid machine mode mismatch when we substitute INLINE_TARGET.
1629 But TARGET is what we will return to the caller. */
1630 if (arriving_mode != departing_mode)
1631 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1632 else
1633 reg_to_map = target;
1635 /* Usually, the result value is the machine's return register.
1636 Sometimes it may be a pseudo. Handle both cases. */
1637 if (REG_FUNCTION_VALUE_P (loc))
1638 map->inline_target = reg_to_map;
1639 else
1640 map->reg_map[REGNO (loc)] = reg_to_map;
1643 /* Make new label equivalences for the labels in the called function. */
1644 for (i = min_labelno; i < max_labelno; i++)
1645 map->label_map[i] = gen_label_rtx ();
1647 /* Perform postincrements before actually calling the function. */
1648 emit_queue ();
1650 /* Clean up stack so that variables might have smaller offsets. */
1651 do_pending_stack_adjust ();
1653 /* Save a copy of the location of const_equiv_map for mark_stores, called
1654 via note_stores. */
1655 global_const_equiv_map = map->const_equiv_map;
1656 global_const_equiv_map_size = map->const_equiv_map_size;
1658 /* If the called function does an alloca, save and restore the
1659 stack pointer around the call. This saves stack space, but
1660 also is required if this inline is being done between two
1661 pushes. */
1662 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1663 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1665 /* Now copy the insns one by one. Do this in two passes, first the insns and
1666 then their REG_NOTES, just like save_for_inline. */
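/* The notes must wait for the second pass presumably because a REG_NOTE
   may refer to an insn that appears later in the stream: insn_map has to
   be completely filled in before such references can be remapped.  */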
1668 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1670 for (insn = insns; insn; insn = NEXT_INSN (insn))
1672 rtx copy, pattern, set;
1674 map->orig_asm_operands_vector = 0;
1676 switch (GET_CODE (insn))
1678 case INSN:
1679 pattern = PATTERN (insn);
1680 set = single_set (insn);
1681 copy = 0;
1682 if (GET_CODE (pattern) == USE
1683 && GET_CODE (XEXP (pattern, 0)) == REG
1684 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1685 /* The (USE (REG n)) at return from the function should
1686 be ignored since we are changing (REG n) into
1687 inline_target. */
1688 break;
1690 /* Ignore setting a function value that we don't want to use. */
1691 if (map->inline_target == 0
1692 && set != 0
1693 && GET_CODE (SET_DEST (set)) == REG
1694 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1696 if (volatile_refs_p (SET_SRC (set)))
1698 rtx new_set;
1700 /* If we must not delete the source,
1701 load it into a new temporary. */
1702 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1704 new_set = single_set (copy);
1705 if (new_set == 0)
1706 abort ();
1708 SET_DEST (new_set)
1709 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1711 else
1712 break;
1715 /* If this is setting the static chain rtx, omit it. */
1716 else if (static_chain_value != 0
1717 && set != 0
1718 && GET_CODE (SET_DEST (set)) == REG
1719 && rtx_equal_p (SET_DEST (set),
1720 static_chain_incoming_rtx))
1721 break;
1723 /* If this is setting the static chain pseudo, set it from
1724 the value we want to give it instead. */
1725 else if (static_chain_value != 0
1726 && set != 0
1727 && rtx_equal_p (SET_SRC (set),
1728 static_chain_incoming_rtx))
1730 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1732 copy = emit_move_insn (newdest, static_chain_value);
1733 static_chain_value = 0;
1735 else
1736 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1737 /* REG_NOTES will be copied later. */
1739 #ifdef HAVE_cc0
1740 /* If this insn is setting CC0, it may need to look at
1741 the insn that uses CC0 to see what type of insn it is.
1742 In that case, the call to recog via validate_change will
1743 fail. So don't substitute constants here. Instead,
1744 do it when we emit the following insn.
1746 For example, see the pyr.md file. That machine has signed and
1747 unsigned compares. The compare patterns must check the
1748 following branch insn to see what kind of compare to
1749 emit.
1751 If the previous insn set CC0, substitute constants on it as
1752 well. */
1753 if (sets_cc0_p (PATTERN (copy)) != 0)
1754 cc0_insn = copy;
1755 else
1757 if (cc0_insn)
1758 try_constants (cc0_insn, map);
1759 cc0_insn = 0;
1760 try_constants (copy, map);
1762 #else
1763 try_constants (copy, map);
1764 #endif
1765 break;
1767 case JUMP_INSN:
1768 if (GET_CODE (PATTERN (insn)) == RETURN)
1770 if (local_return_label == 0)
1771 local_return_label = gen_label_rtx ();
1772 pattern = gen_jump (local_return_label);
1774 else
1775 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1777 copy = emit_jump_insn (pattern);
1779 #ifdef HAVE_cc0
1780 if (cc0_insn)
1781 try_constants (cc0_insn, map);
1782 cc0_insn = 0;
1783 #endif
1784 try_constants (copy, map);
1786 /* If this used to be a conditional jump insn whose branch
1787 direction is now known, we must do something special. */
1788 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1790 #ifdef HAVE_cc0
1791 /* The previous insn set cc0 for us. So delete it. */
1792 delete_insn (PREV_INSN (copy));
1793 #endif
1795 /* If this is now a no-op, delete it. */
1796 if (map->last_pc_value == pc_rtx)
1798 delete_insn (copy);
1799 copy = 0;
1801 else
1802 /* Otherwise, this is an unconditional jump so we must put a
1803 BARRIER after it. We could do some dead code elimination
1804 here, but jump.c will do it just as well. */
1805 emit_barrier ();
1807 break;
1809 case CALL_INSN:
1810 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1811 copy = emit_call_insn (pattern);
1813 /* Because the USAGE information potentially contains objects other
1814 than hard registers, we need to copy it. */
1815 CALL_INSN_FUNCTION_USAGE (copy) =
1816 copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1818 #ifdef HAVE_cc0
1819 if (cc0_insn)
1820 try_constants (cc0_insn, map);
1821 cc0_insn = 0;
1822 #endif
1823 try_constants (copy, map);
1825 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1826 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1827 map->const_equiv_map[i] = 0;
1828 break;
1830 case CODE_LABEL:
1831 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1832 LABEL_NAME (copy) = LABEL_NAME (insn);
1833 map->const_age++;
1834 break;
1836 case BARRIER:
1837 copy = emit_barrier ();
1838 break;
1840 case NOTE:
1841 /* It is important to discard function-end and function-beg notes,
1842 so we have only one of each in the current function.
1843 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1844 deleted these in the copy used for continuing compilation,
1845 not the copy used for inlining). */
1846 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1847 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1848 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1849 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1850 else
1851 copy = 0;
1852 break;
1854 default:
1855 abort ();
1856 break;
1859 if (copy)
1860 RTX_INTEGRATED_P (copy) = 1;
1862 map->insn_map[INSN_UID (insn)] = copy;
1865 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1866 from parameters can be substituted in. These are the only ones that
1867 are valid across the entire function. */
1868 map->const_age++;
1869 for (insn = insns; insn; insn = NEXT_INSN (insn))
1870 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1871 && map->insn_map[INSN_UID (insn)]
1872 && REG_NOTES (insn))
1874 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
1875 /* We must also do subst_constants, in case one of our parameters
1876 has const type and constant value. */
1877 subst_constants (&tem, NULL_RTX, map);
1878 apply_change_group ();
1879 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
1882 if (local_return_label)
1883 emit_label (local_return_label);
1885 /* Restore the stack pointer if we saved it above. */
1886 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1887 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1889 /* Make copies of the decls of the symbols in the inline function, so that
1890 the copies of the variables get declared in the current function. Set
1891 up things so that lookup_static_chain knows to interpret registers
1892 in SAVE_EXPRs for TYPE_SIZEs as local. */
1894 inline_function_decl = fndecl;
1895 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1896 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
1897 inline_function_decl = 0;
1899 /* End the scope containing the copied formal parameter variables
1900 and copied LABEL_DECLs. */
1902 expand_end_bindings (getdecls (), 1, 1);
1903 block = poplevel (1, 1, 0);
1904 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
1905 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
1906 poplevel (0, 0, 0);
1907 emit_line_note (input_filename, lineno);
1909 if (structure_value_addr)
1911 target = gen_rtx (MEM, TYPE_MODE (type),
1912 memory_address (TYPE_MODE (type), structure_value_addr));
1913 MEM_IN_STRUCT_P (target) = 1;
1915 return target;
1918 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1919 push all of those decls and give each one the corresponding home. */
1921 static void
1922 integrate_parm_decls (args, map, arg_vector)
1923 tree args;
1924 struct inline_remap *map;
1925 rtvec arg_vector;
1927 register tree tail;
1928 register int i;
1930 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1932 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1933 TREE_TYPE (tail));
1934 rtx new_decl_rtl
1935 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1937 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
1938 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1939 here, but that's going to require some more work. */
1940 /* DECL_INCOMING_RTL (decl) = ?; */
1941 /* These args would always appear unused, if not for this. */
1942 TREE_USED (decl) = 1;
1943 /* Prevent warning for shadowing with these. */
1944 DECL_ABSTRACT_ORIGIN (decl) = tail;
1945 pushdecl (decl);
1946 /* Fully instantiate the address with the equivalent form so that the
1947 debugging information contains the actual register, instead of the
1948 virtual register. Do this by not passing an insn to
1949 subst_constants. */
1950 subst_constants (&new_decl_rtl, NULL_RTX, map);
1951 apply_change_group ();
1952 DECL_RTL (decl) = new_decl_rtl;
1956 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1957 current function a tree of contexts isomorphic to the one that is given.
1959 LEVEL indicates how far down into the BLOCK tree is the node we are
1960 currently traversing. It is always zero except for recursive calls.
1962 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1963 registers used in the DECL_RTL field should be remapped. If it is zero,
1964 no mapping is necessary. */
1966 static void
1967 integrate_decl_tree (let, level, map)
1968 tree let;
1969 int level;
1970 struct inline_remap *map;
1972 tree t, node;
1974 if (level > 0)
1975 pushlevel (0);
1977 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1979 tree d;
1981 push_obstacks_nochange ();
1982 saveable_allocation ();
1983 d = copy_node (t);
1984 pop_obstacks ();
1986 if (DECL_RTL (t) != 0)
1988 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1989 /* Fully instantiate the address with the equivalent form so that the
1990 debugging information contains the actual register, instead of the
1991 virtual register. Do this by not passing an insn to
1992 subst_constants. */
1993 subst_constants (&DECL_RTL (d), NULL_RTX, map);
1994 apply_change_group ();
1996 /* These args would always appear unused, if not for this. */
1997 TREE_USED (d) = 1;
1998 /* Prevent warning for shadowing with these. */
1999 DECL_ABSTRACT_ORIGIN (d) = t;
2001 if (DECL_LANG_SPECIFIC (d))
2002 copy_lang_decl (d);
2004 pushdecl (d);
2007 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2008 integrate_decl_tree (t, level + 1, map);
2010 if (level > 0)
2012 node = poplevel (1, 0, 0);
2013 if (node)
2015 TREE_USED (node) = TREE_USED (let);
2016 BLOCK_ABSTRACT_ORIGIN (node) = let;
2021 /* Create a new copy of an rtx.
2022 Recursively copies the operands of the rtx,
2023 except for those few rtx codes that are sharable.
2025 We always return an rtx that is similar to that incoming rtx, with the
2026 exception of possibly changing a REG to a SUBREG or vice versa. No
2027 rtl is ever emitted.
2029 Handle constants that need to be placed in the constant pool by
2030 calling `force_const_mem'. */
2032 rtx
2033 copy_rtx_and_substitute (orig, map)
2034 register rtx orig;
2035 struct inline_remap *map;
2037 register rtx copy, temp;
2038 register int i, j;
2039 register RTX_CODE code;
2040 register enum machine_mode mode;
2041 register char *format_ptr;
2042 int regno;
2044 if (orig == 0)
2045 return 0;
2047 code = GET_CODE (orig);
2048 mode = GET_MODE (orig);
2050 switch (code)
2052 case REG:
2053 /* If the stack pointer register shows up, it must be part of
2054 stack-adjustments (*not* because we eliminated the frame pointer!).
2055 Small hard registers are returned as-is. Pseudo-registers
2056 go through their `reg_map'. */
2057 regno = REGNO (orig);
2058 if (regno <= LAST_VIRTUAL_REGISTER)
2060 /* Some hard registers are also mapped,
2061 but others are not translated. */
2062 if (map->reg_map[regno] != 0)
2063 return map->reg_map[regno];
2065 /* If this is the virtual frame pointer, make space in current
2066 function's stack frame for the stack frame of the inline function.
2068 Copy the address of this area into a pseudo. Map
2069 virtual_stack_vars_rtx to this pseudo and set up a constant
2070 equivalence for it to be the address. This will substitute the
2071 address into insns where it can be substituted and use the new
2072 pseudo where it can't. */
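/* A sketch with hypothetical numbers: a use of
   (plus (reg VIRTUAL_STACK_VARS_REGNUM) (const_int -4)) in the inline body
   becomes (plus (reg 66) (const_int -4)); since pseudo 66 carries a
   constant equivalence for the new block's address, subst_constants can
   often fold the sum back into a directly usable address.  */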
2073 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2075 rtx loc, seq;
2076 int size = DECL_FRAME_SIZE (map->fndecl);
2077 int rounded;
2079 start_sequence ();
2080 loc = assign_stack_temp (BLKmode, size, 1);
2081 loc = XEXP (loc, 0);
2082 #ifdef FRAME_GROWS_DOWNWARD
2083 /* In this case, virtual_stack_vars_rtx points to one byte
2084 higher than the top of the frame area. So compute the offset
2085 to one byte higher than our substitute frame.
2086 Keep the fake frame pointer aligned like a real one. */
2087 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2088 loc = plus_constant (loc, rounded);
2089 #endif
2090 map->reg_map[regno] = temp
2091 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2093 if (REGNO (temp) < map->const_equiv_map_size)
2095 map->const_equiv_map[REGNO (temp)] = loc;
2096 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2099 seq = gen_sequence ();
2100 end_sequence ();
2101 emit_insn_after (seq, map->insns_at_start);
2102 return temp;
2104 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2106 /* Do the same for a block to contain any arguments referenced
2107 in memory. */
2108 rtx loc, seq;
2109 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2111 start_sequence ();
2112 loc = assign_stack_temp (BLKmode, size, 1);
2113 loc = XEXP (loc, 0);
2114 /* When arguments grow downward, the virtual incoming
2115 args pointer points to the top of the argument block,
2116 so the remapped location had better do the same. */
2117 #ifdef ARGS_GROW_DOWNWARD
2118 loc = plus_constant (loc, size);
2119 #endif
2120 map->reg_map[regno] = temp
2121 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2123 if (REGNO (temp) < map->const_equiv_map_size)
2125 map->const_equiv_map[REGNO (temp)] = loc;
2126 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2129 seq = gen_sequence ();
2130 end_sequence ();
2131 emit_insn_after (seq, map->insns_at_start);
2132 return temp;
2134 else if (REG_FUNCTION_VALUE_P (orig))
2136 /* This is a reference to the function return value. If
2137 the function doesn't have a return value, error. If the
2138 mode doesn't agree, make a SUBREG. */
2139 if (map->inline_target == 0)
2140 /* Must be unrolling loops or replicating code if we
2141 reach here, so return the register unchanged. */
2142 return orig;
2143 else if (mode != GET_MODE (map->inline_target))
2144 return gen_lowpart (mode, map->inline_target);
2145 else
2146 return map->inline_target;
2148 return orig;
2150 if (map->reg_map[regno] == NULL)
2152 map->reg_map[regno] = gen_reg_rtx (mode);
2153 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2154 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2155 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2156 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2158 return map->reg_map[regno];
2160 case SUBREG:
2161 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2162 /* SUBREG is ordinary, but don't make nested SUBREGs. */
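/* E.g. (hypothetical): if the inner copy came back as
   (subreg:DI (reg:TI 70) 2), a (subreg:SI ... 1) of the original is
   flattened to (subreg:SI (reg:TI 70) 3) by adding the word numbers.  */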
2163 if (GET_CODE (copy) == SUBREG)
2164 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2165 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2166 else if (GET_CODE (copy) == CONCAT)
2167 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2168 else
2169 return gen_rtx (SUBREG, GET_MODE (orig), copy,
2170 SUBREG_WORD (orig));
2172 case USE:
2173 case CLOBBER:
2174 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2175 to (use foo) if the original insn didn't have a subreg.
2176 Removing the subreg distorts the VAX movstrhi pattern
2177 by changing the mode of an operand. */
2178 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2179 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2180 copy = SUBREG_REG (copy);
2181 return gen_rtx (code, VOIDmode, copy);
2183 case CODE_LABEL:
2184 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
2185 = LABEL_PRESERVE_P (orig);
2186 return map->label_map[CODE_LABEL_NUMBER (orig)];
2188 case LABEL_REF:
2189 copy = gen_rtx (LABEL_REF, mode,
2190 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2191 : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
2192 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2194 /* The fact that this label was previously nonlocal does not mean
2195 it still is, so we must check if it is within the range of
2196 this function's labels. */
2197 LABEL_REF_NONLOCAL_P (copy)
2198 = (LABEL_REF_NONLOCAL_P (orig)
2199 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2200 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2202 /* If we have made a nonlocal label local, it means that this
2203 inlined call will be referring to our nonlocal goto handler.
2204 So make sure we create one for this block; we normally would
2205 not since this is not otherwise considered a "call". */
2206 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2207 function_call_count++;
2209 return copy;
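/* These codes are shared unconditionally and contain nothing that could
   need substitution, so they are returned unchanged.  */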
2211 case PC:
2212 case CC0:
2213 case CONST_INT:
2214 return orig;
2216 case SYMBOL_REF:
2217 /* Symbols which represent the address of a label stored in the constant
2218 pool must be modified to point to a constant pool entry for the
2219 remapped label. Otherwise, symbols are returned unchanged. */
2220 if (CONSTANT_POOL_ADDRESS_P (orig))
2222 rtx constant = get_pool_constant (orig);
2223 if (GET_CODE (constant) == LABEL_REF)
2224 return XEXP (force_const_mem (Pmode,
2225 copy_rtx_and_substitute (constant,
2226 map)),
2227 0);
2230 return orig;
2232 case CONST_DOUBLE:
2233 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2234 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2235 duplicate of a CONST_DOUBLE we have already seen. */
2236 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2238 REAL_VALUE_TYPE d;
2240 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2241 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2243 else
2244 return immed_double_const (CONST_DOUBLE_LOW (orig),
2245 CONST_DOUBLE_HIGH (orig), VOIDmode);
2247 case CONST:
2248 /* Make new constant pool entry for a constant
2249 that was in the pool of the inline function. */
2250 if (RTX_INTEGRATED_P (orig))
2252 /* If this was an address of a constant pool entry that itself
2253 had to be placed in the constant pool, it might not be a
2254 valid address. So the recursive call below might turn it
2255 into a register. In that case, it isn't a constant any
2256 more, so return it. This has the potential of changing a
2257 MEM into a REG, but we'll assume that it is safe. */
2258 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2259 if (! CONSTANT_P (temp))
2260 return temp;
2261 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2263 break;
2265 case ADDRESS:
2266 /* If from constant pool address, make new constant pool entry and
2267 return its address. */
2268 if (! RTX_INTEGRATED_P (orig))
2269 abort ();
2271 temp = force_const_mem (GET_MODE (orig),
2272 copy_rtx_and_substitute (XEXP (orig, 0), map));
2274 #if 0
2275 /* Legitimizing the address here is incorrect.
2277 The only ADDRESS rtx's that can reach here are ones created by
2278 save_constants. Hence the operand of the ADDRESS is always valid
2279 in this position of the instruction, since the original rtx without
2280 the ADDRESS was valid.
2282 The reason we don't legitimize the address here is that on the
2283 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2284 This code forces the operand of the address to a register, which
2285 fails because we can not take the HIGH part of a register.
2287 Also, change_address may create new registers. These registers
2288 will not have valid reg_map entries. This can cause try_constants()
2289 to fail because it assumes that all registers in the rtx have valid
2290 reg_map entries, and it may end up replacing one of these new
2291 registers with junk. */
2293 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2294 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2295 #endif
2297 return XEXP (temp, 0);
2299 case ASM_OPERANDS:
2300 /* If a single asm insn contains multiple output operands
2301 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2302 We must make sure that the copied insn continues to share it. */
2303 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2305 copy = rtx_alloc (ASM_OPERANDS);
2306 copy->volatil = orig->volatil;
2307 XSTR (copy, 0) = XSTR (orig, 0);
2308 XSTR (copy, 1) = XSTR (orig, 1);
2309 XINT (copy, 2) = XINT (orig, 2);
2310 XVEC (copy, 3) = map->copy_asm_operands_vector;
2311 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2312 XSTR (copy, 5) = XSTR (orig, 5);
2313 XINT (copy, 6) = XINT (orig, 6);
2314 return copy;
2316 break;
2318 case CALL:
2319 /* This is given special treatment because the first
2320 operand of a CALL is a (MEM ...) which may get
2321 forced into a register for cse. This is undesirable
2322 if function-address cse isn't wanted or if we won't do cse. */
2323 #ifndef NO_FUNCTION_CSE
2324 if (! (optimize && ! flag_no_function_cse))
2325 #endif
2326 return gen_rtx (CALL, GET_MODE (orig),
2327 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2328 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2329 copy_rtx_and_substitute (XEXP (orig, 1), map));
2330 break;
2332 #if 0
2333 /* Must be ifdefed out for loop unrolling to work. */
2334 case RETURN:
2335 abort ();
2336 #endif
2338 case SET:
2339 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2340 Don't alter that.
2341 If the nonlocal goto is into the current function,
2342 this will result in unnecessarily bad code, but should work. */
2343 if (SET_DEST (orig) == virtual_stack_vars_rtx
2344 || SET_DEST (orig) == virtual_incoming_args_rtx)
2345 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2346 copy_rtx_and_substitute (SET_SRC (orig), map));
2347 break;
2349 case MEM:
2350 copy = rtx_alloc (MEM);
2351 PUT_MODE (copy, mode);
2352 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2353 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2354 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2356 /* If doing function inlining, this MEM might not be const in the
2357 function that it is being inlined into, and thus may not be
2358 unchanging after function inlining. Constant pool references are
2359 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2360 for them. */
2361 if (! map->integrating)
2362 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2364 return copy;
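/* Every other code: allocate a fresh rtx and copy each operand below as
   directed by its format letter.  */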
2367 copy = rtx_alloc (code);
2368 PUT_MODE (copy, mode);
2369 copy->in_struct = orig->in_struct;
2370 copy->volatil = orig->volatil;
2371 copy->unchanging = orig->unchanging;
2373 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2375 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2377 switch (*format_ptr++)
2379 case '0':
2380 break;
2382 case 'e':
2383 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2384 break;
2386 case 'u':
2387 /* Change any references to old-insns to point to the
2388 corresponding copied insns. */
2389 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2390 break;
2392 case 'E':
2393 XVEC (copy, i) = XVEC (orig, i);
2394 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2396 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2397 for (j = 0; j < XVECLEN (copy, i); j++)
2398 XVECEXP (copy, i, j)
2399 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2401 break;
2403 case 'w':
2404 XWINT (copy, i) = XWINT (orig, i);
2405 break;
2407 case 'i':
2408 XINT (copy, i) = XINT (orig, i);
2409 break;
2411 case 's':
2412 XSTR (copy, i) = XSTR (orig, i);
2413 break;
2415 default:
2416 abort ();
2420 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2422 map->orig_asm_operands_vector = XVEC (orig, 3);
2423 map->copy_asm_operands_vector = XVEC (copy, 3);
2424 map->copy_asm_constraints_vector = XVEC (copy, 4);
2427 return copy;
2430 /* Substitute known constant values into INSN, if that is valid. */
2432 void
2433 try_constants (insn, map)
2434 rtx insn;
2435 struct inline_remap *map;
2437 int i;
2439 map->num_sets = 0;
2440 subst_constants (&PATTERN (insn), insn, map);
2442 /* Apply the changes if they are valid; otherwise discard them. */
2443 apply_change_group ();
2445 /* Show we don't know the value of anything stored or clobbered. */
2446 note_stores (PATTERN (insn), mark_stores);
2447 map->last_pc_value = 0;
2448 #ifdef HAVE_cc0
2449 map->last_cc0_value = 0;
2450 #endif
2452 /* Set up any constant equivalences made in this insn. */
2453 for (i = 0; i < map->num_sets; i++)
2455 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2457 int regno = REGNO (map->equiv_sets[i].dest);
2459 if (regno < map->const_equiv_map_size
2460 && (map->const_equiv_map[regno] == 0
2461 /* The following clause is a hack to make the case work where GNU C++
2462 reassigns a variable to make cse work right. */
2463 || ! rtx_equal_p (map->const_equiv_map[regno],
2464 map->equiv_sets[i].equiv)))
2466 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2467 map->const_age_map[regno] = map->const_age;
2470 else if (map->equiv_sets[i].dest == pc_rtx)
2471 map->last_pc_value = map->equiv_sets[i].equiv;
2472 #ifdef HAVE_cc0
2473 else if (map->equiv_sets[i].dest == cc0_rtx)
2474 map->last_cc0_value = map->equiv_sets[i].equiv;
2475 #endif
2479 /* Substitute known constants for pseudo regs in the contents of LOC,
2480 which are part of INSN.
2481 If INSN is zero, the substitution should always be done (this is used to
2482 update DECL_RTL).
2483 These changes are taken out by try_constants if the result is not valid.
2485 Note that we are more concerned with determining when the result of a SET
2486 is a constant, for further propagation, than actually inserting constants
2487 into insns; cse will do the latter task better.
2489 This function is also used to adjust address of items previously addressed
2490 via the virtual stack variable or virtual incoming arguments registers. */
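/* An illustration with hypothetical registers: once (reg 70) is known to
   equal (const_int 4), a source (plus (reg 69) (reg 70)) is rewritten as
   (plus (reg 69) (const_int 4)); if (reg 69) is known as well, the
   simplify_* calls at the bottom can fold the whole PLUS to a CONST_INT.  */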
2492 static void
2493 subst_constants (loc, insn, map)
2494 rtx *loc;
2495 rtx insn;
2496 struct inline_remap *map;
2498 rtx x = *loc;
2499 register int i;
2500 register enum rtx_code code;
2501 register char *format_ptr;
2502 int num_changes = num_validated_changes ();
2503 rtx new = 0;
2504 enum machine_mode op0_mode;
2506 code = GET_CODE (x);
2508 switch (code)
2510 case PC:
2511 case CONST_INT:
2512 case CONST_DOUBLE:
2513 case SYMBOL_REF:
2514 case CONST:
2515 case LABEL_REF:
2516 case ADDRESS:
2517 return;
2519 #ifdef HAVE_cc0
2520 case CC0:
2521 validate_change (insn, loc, map->last_cc0_value, 1);
2522 return;
2523 #endif
2525 case USE:
2526 case CLOBBER:
2527 /* The only thing we can do with a USE or CLOBBER is possibly do
2528 some substitutions in a MEM within it. */
2529 if (GET_CODE (XEXP (x, 0)) == MEM)
2530 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2531 return;
2533 case REG:
2534 /* Substitute for parms and known constants. Don't replace
2535 hard regs used as user variables with constants. */
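/* An equivalence is usable only while its recorded age is current;
   parameter equivalences were recorded with CONST_AGE_PARM, which is large
   enough that they effectively never go stale, so they survive the label
   boundaries at which const_age is bumped.  */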
2537 int regno = REGNO (x);
2539 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2540 && regno < map->const_equiv_map_size
2541 && map->const_equiv_map[regno] != 0
2542 && map->const_age_map[regno] >= map->const_age)
2543 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2544 return;
2547 case SUBREG:
2548 /* SUBREG applied to something other than a reg
2549 should be treated as ordinary, since that must
2550 be a special hack and we don't know how to treat it specially.
2551 Consider for example mulsidi3 in m68k.md.
2552 Ordinary SUBREG of a REG needs this special treatment. */
2553 if (GET_CODE (SUBREG_REG (x)) == REG)
2555 rtx inner = SUBREG_REG (x);
2556 rtx new = 0;
2558 /* We can't call subst_constants on &SUBREG_REG (x) because any
2559 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2560 see what is inside, try to form the new SUBREG and see if that is
2561 valid. We handle two cases: extracting a full word in an
2562 integral mode and extracting the low part. */
2563 subst_constants (&inner, NULL_RTX, map);
2565 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2566 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2567 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2568 new = operand_subword (inner, SUBREG_WORD (x), 0,
2569 GET_MODE (SUBREG_REG (x)));
2571 if (new == 0 && subreg_lowpart_p (x))
2572 new = gen_lowpart_common (GET_MODE (x), inner);
2574 if (new)
2575 validate_change (insn, loc, new, 1);
2577 return;
2579 break;
2581 case MEM:
2582 subst_constants (&XEXP (x, 0), insn, map);
2584 /* If a memory address got spoiled, change it back. */
2585 if (insn != 0 && num_validated_changes () != num_changes
2586 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2587 cancel_changes (num_changes);
2588 return;
2590 case SET:
2592 /* Substitute constants in our source, and in any arguments to a
2593 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2594 itself. */
2595 rtx *dest_loc = &SET_DEST (x);
2596 rtx dest = *dest_loc;
2597 rtx src, tem;
2599 subst_constants (&SET_SRC (x), insn, map);
2600 src = SET_SRC (x);
2602 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2603 /* By convention, we always use ZERO_EXTRACT in the dest. */
2604 /* || GET_CODE (*dest_loc) == SIGN_EXTRACT */
2605 || GET_CODE (*dest_loc) == SUBREG
2606 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2608 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2610 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2611 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2613 dest_loc = &XEXP (*dest_loc, 0);
2616 /* Do substitute in the address of a destination in memory. */
2617 if (GET_CODE (*dest_loc) == MEM)
2618 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2620 /* Check for the case where DEST is a SUBREG, both it and the underlying
2621 register are no wider than one word, and the SUBREG's mode is at least
2622 as wide. In that case, we are really setting the underlying register to
2623 the source converted to the mode of DEST. So indicate that. */
2624 if (GET_CODE (dest) == SUBREG
2625 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2626 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2627 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2628 <= GET_MODE_SIZE (GET_MODE (dest)))
2629 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2630 src)))
2631 src = tem, dest = SUBREG_REG (dest);
2633 /* If storing a recognizable value, save it for later recording. */
2634 if ((map->num_sets < MAX_RECOG_OPERANDS)
2635 && (CONSTANT_P (src)
2636 || (GET_CODE (src) == REG
2637 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2638 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2639 || (GET_CODE (src) == PLUS
2640 && GET_CODE (XEXP (src, 0)) == REG
2641 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2642 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2643 && CONSTANT_P (XEXP (src, 1)))
2644 || GET_CODE (src) == COMPARE
2645 #ifdef HAVE_cc0
2646 || dest == cc0_rtx
2647 #endif
2648 || (dest == pc_rtx
2649 && (src == pc_rtx || GET_CODE (src) == RETURN
2650 || GET_CODE (src) == LABEL_REF))))
2652 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2653 it will cause us to save the COMPARE with any constants
2654 substituted, which is what we want for later. */
2655 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2656 map->equiv_sets[map->num_sets++].dest = dest;
2659 return;
2663 format_ptr = GET_RTX_FORMAT (code);
2665 /* If the first operand is an expression, save its mode for later. */
2666 if (*format_ptr == 'e')
2667 op0_mode = GET_MODE (XEXP (x, 0));
2669 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2671 switch (*format_ptr++)
2673 case '0':
2674 break;
2676 case 'e':
2677 if (XEXP (x, i))
2678 subst_constants (&XEXP (x, i), insn, map);
2679 break;
2681 case 'u':
2682 case 'i':
2683 case 's':
2684 case 'w':
2685 break;
2687 case 'E':
2688 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2690 int j;
2691 for (j = 0; j < XVECLEN (x, i); j++)
2692 subst_constants (&XVECEXP (x, i, j), insn, map);
2694 break;
2696 default:
2697 abort ();
2701 /* If this is a commutative operation, move a constant to the second
2702 operand unless the second operand is already a CONST_INT. */
2703 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2704 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2706 rtx tem = XEXP (x, 0);
2707 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2708 validate_change (insn, &XEXP (x, 1), tem, 1);
2711 /* Simplify the expression in case we put in some constants. */
2712 switch (GET_RTX_CLASS (code))
2714 case '1':
2715 new = simplify_unary_operation (code, GET_MODE (x),
2716 XEXP (x, 0), op0_mode);
2717 break;
2719 case '<':
2721 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2722 if (op_mode == VOIDmode)
2723 op_mode = GET_MODE (XEXP (x, 1));
2724 new = simplify_relational_operation (code, op_mode,
2725 XEXP (x, 0), XEXP (x, 1));
2726 #ifdef FLOAT_STORE_FLAG_VALUE
2727 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2728 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2729 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2730 GET_MODE (x)));
2731 #endif
2732 break;
2735 case '2':
2736 case 'c':
2737 new = simplify_binary_operation (code, GET_MODE (x),
2738 XEXP (x, 0), XEXP (x, 1));
2739 break;
2741 case 'b':
2742 case '3':
2743 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2744 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2745 break;
2748 if (new)
2749 validate_change (insn, loc, new, 1);
2752 /* Show that the registers modified no longer contain known constants. We are
2753 called from note_stores with parts of the new insn. */
2755 void
2756 mark_stores (dest, x)
2757 rtx dest;
2758 rtx x;
2760 int regno = -1;
2761 enum machine_mode mode;
2763 /* DEST is always the innermost thing set, except in the case of
2764 SUBREGs of hard registers. */
2766 if (GET_CODE (dest) == REG)
2767 regno = REGNO (dest), mode = GET_MODE (dest);
2768 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2770 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2771 mode = GET_MODE (SUBREG_REG (dest));
2774 if (regno >= 0)
2776 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2777 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2778 int i;
2780 for (i = regno; i <= last_reg; i++)
2781 if (i < global_const_equiv_map_size)
2782 global_const_equiv_map[i] = 0;
2786 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2787 pointed to by PX, they represent constants in the constant pool.
2788 Replace these with a new memory reference obtained from force_const_mem.
2789 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2790 address of a constant pool entry. Replace them with the address of
2791 a new constant pool entry obtained from force_const_mem. */
2793 static void
2794 restore_constants (px)
2795 rtx *px;
2797 rtx x = *px;
2798 int i, j;
2799 char *fmt;
2801 if (x == 0)
2802 return;
2804 if (GET_CODE (x) == CONST_DOUBLE)
2806 /* We have to make a new CONST_DOUBLE to ensure that we account for
2807 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2808 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2810 REAL_VALUE_TYPE d;
2812 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2813 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
2815 else
2816 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2817 VOIDmode);
2820 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2822 restore_constants (&XEXP (x, 0));
2823 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2825 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2827 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2828 rtx new = XEXP (SUBREG_REG (x), 0);
2830 restore_constants (&new);
2831 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2832 PUT_MODE (new, GET_MODE (x));
2833 *px = validize_mem (new);
2835 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2837 restore_constants (&XEXP (x, 0));
2838 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
2840 else
2842 fmt = GET_RTX_FORMAT (GET_CODE (x));
2843 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2845 switch (*fmt++)
2847 case 'E':
2848 for (j = 0; j < XVECLEN (x, i); j++)
2849 restore_constants (&XVECEXP (x, i, j));
2850 break;
2852 case 'e':
2853 restore_constants (&XEXP (x, i));
2854 break;
2860 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2861 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2862 that it points to the node itself, thus indicating that the node is its
2863 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2864 the given node is NULL, recursively descend the decl/block tree which
2865 it is the root of, and for each other ..._DECL or BLOCK node contained
2866 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2867 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2868 values to point to themselves. */
2870 static void
2871 set_block_origin_self (stmt)
2872 register tree stmt;
2874 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2876 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2879 register tree local_decl;
2881 for (local_decl = BLOCK_VARS (stmt);
2882 local_decl != NULL_TREE;
2883 local_decl = TREE_CHAIN (local_decl))
2884 set_decl_origin_self (local_decl); /* Potential recursion. */
2888 register tree subblock;
2890 for (subblock = BLOCK_SUBBLOCKS (stmt);
2891 subblock != NULL_TREE;
2892 subblock = BLOCK_CHAIN (subblock))
2893 set_block_origin_self (subblock); /* Recurse. */
2898 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2899 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2900 node so that it points to the node itself, thus indicating that the
2901 node represents its own (abstract) origin. Additionally, if the
2902 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2903 the decl/block tree of which the given node is the root, and for
2904 each other ..._DECL or BLOCK node contained therein whose
2905 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2906 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2907 point to themselves. */
2909 static void
2910 set_decl_origin_self (decl)
2911 register tree decl;
2913 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2915 DECL_ABSTRACT_ORIGIN (decl) = decl;
2916 if (TREE_CODE (decl) == FUNCTION_DECL)
2918 register tree arg;
2920 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2921 DECL_ABSTRACT_ORIGIN (arg) = arg;
2922 if (DECL_INITIAL (decl) != NULL_TREE)
2923 set_block_origin_self (DECL_INITIAL (decl));
2928 /* Given a pointer to some BLOCK node, and a boolean value to set the
2929 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2930 the given block, and for all local decls and all local sub-blocks
2931 (recursively) which are contained therein. */
2933 static void
2934 set_block_abstract_flags (stmt, setting)
2935 register tree stmt;
2936 register int setting;
2938 BLOCK_ABSTRACT (stmt) = setting;
2941 register tree local_decl;
2943 for (local_decl = BLOCK_VARS (stmt);
2944 local_decl != NULL_TREE;
2945 local_decl = TREE_CHAIN (local_decl))
2946 set_decl_abstract_flags (local_decl, setting);
2950 register tree subblock;
2952 for (subblock = BLOCK_SUBBLOCKS (stmt);
2953 subblock != NULL_TREE;
2954 subblock = BLOCK_CHAIN (subblock))
2955 set_block_abstract_flags (subblock, setting);
2959 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2960 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2961 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2962 set the abstract flags for all of the parameters, local vars, local
2963 blocks and sub-blocks (recursively) to the same setting. */
2965 void
2966 set_decl_abstract_flags (decl, setting)
2967 register tree decl;
2968 register int setting;
2970 DECL_ABSTRACT (decl) = setting;
2971 if (TREE_CODE (decl) == FUNCTION_DECL)
2973 register tree arg;
2975 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2976 DECL_ABSTRACT (arg) = setting;
2977 if (DECL_INITIAL (decl) != NULL_TREE)
2978 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2982 /* Output the assembly language code for the function FNDECL
2983 from its DECL_SAVED_INSNS. Used for inline functions that are output
2984 at end of compilation instead of where they came in the source. */
2986 void
2987 output_inline_function (fndecl)
2988 tree fndecl;
2990 rtx head;
2991 rtx last;
2993 if (output_bytecode)
2995 warning ("`inline' ignored for bytecode output");
2996 return;
2999 head = DECL_SAVED_INSNS (fndecl);
3000 current_function_decl = fndecl;
3002 /* This call is only used to initialize global variables. */
3003 init_function_start (fndecl, "lossage", 1);
3005 /* Redo parameter determinations in case the FUNCTION_...
3006 macros took machine-specific actions that need to be redone. */
3007 assign_parms (fndecl, 1);
3009 /* Set stack frame size. */
3010 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3012 restore_reg_data (FIRST_PARM_INSN (head));
3014 stack_slot_list = STACK_SLOT_LIST (head);
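/* Restore the assorted function-wide flags that were recorded in the
   header when this function was saved for inlining.  */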
3016 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3017 current_function_calls_alloca = 1;
3019 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3020 current_function_calls_setjmp = 1;
3022 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3023 current_function_calls_longjmp = 1;
3025 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3026 current_function_returns_struct = 1;
3028 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3029 current_function_returns_pcc_struct = 1;
3031 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3032 current_function_needs_context = 1;
3034 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3035 current_function_has_nonlocal_label = 1;
3037 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3038 current_function_returns_pointer = 1;
3040 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3041 current_function_uses_const_pool = 1;
3043 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3044 current_function_uses_pic_offset_table = 1;
3046 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3047 current_function_pops_args = POPS_ARGS (head);
3049 /* There is no need to output a return label again. */
3050 return_label = 0;
3052 expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl), 0);
3054 /* Find last insn and rebuild the constant pool. */
3055 for (last = FIRST_PARM_INSN (head);
3056 NEXT_INSN (last); last = NEXT_INSN (last))
3058 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3060 restore_constants (&PATTERN (last));
3061 restore_constants (&REG_NOTES (last));
3065 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3066 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3068 /* We must have already output DWARF debugging information for the
3069 original (abstract) inline function declaration/definition, so
3070 we want to make sure that the debugging information we generate
3071 for this special instance of the inline function refers back to
3072 the information we already generated. To make sure that happens,
3073 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3074 node (and for all of the local ..._DECL nodes which are its children)
3075 so that they all point to themselves. */
3077 set_decl_origin_self (fndecl);
3079 /* We're not deferring this any longer. */
3080 DECL_DEFER_OUTPUT (fndecl) = 0;
3082 /* Compile this function all the way down to assembly code. */
3083 rest_of_compilation (fndecl);
3085 current_function_decl = 0;