/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
31 #include "insn-config.h"
32 #include "insn-flags.h"
36 #include "integrate.h"
45 #define obstack_chunk_alloc xmalloc
46 #define obstack_chunk_free free
extern struct obstack *function_maybepermanent_obstack;
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE, ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
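/* A worked example of the macro above: CEIL_ROUND (13, 8) computes
   (13 + 7) & ~7 = 20 & ~7 = 16, i.e. 13 rounded up to the next multiple
   of 8.  Note that ALIGN must be a power of two for the mask trick to
   be exact.  */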
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
static rtvec initialize_for_inline PROTO((tree));
static void note_modified_parmregs PROTO((rtx, rtx, void *));
static void integrate_parm_decls PROTO((tree, struct inline_remap *,
					rtvec));
static tree integrate_decl_tree PROTO((tree,
				       struct inline_remap *));
static void subst_constants PROTO((rtx *, rtx,
				   struct inline_remap *, int));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));
static void process_reg_param PROTO((struct inline_remap *, rtx,
				     rtx));
void set_decl_abstract_flags PROTO((tree, int));
static tree copy_and_set_decl_abstract_origin PROTO((tree));
static rtx expand_inline_function_eh_labelmap PROTO((rtx));
static void mark_stores PROTO((rtx, rtx, void *));
/* The maximum number of instructions accepted for inlining a
   function.  Increasing values mean more aggressive inlining.
   This affects currently only functions explicitly marked as
   inline (or methods defined within the class definition for C++).
   The default value of 10000 is arbitrary but high to match the
   previously unlimited gcc capabilities.  */

int inline_max_insns = 10000;
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the FUNCTION_DECL for the inlined function.  */

static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
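/* A typical call, as used when remapping a CODE_LABEL later in this
   file:

     rtx lab = get_label_from_map (map, CODE_LABEL_NUMBER (orig));

   Entries are created only for label numbers that are actually
   referenced, so a mostly-unused label_map stays cheap.  */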
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     inline_max_insns (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (inline_max_insns
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  rtx result;
  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }
  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }
  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");
  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    ;
  if (insn && GET_CODE (insn) == NOTE
      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
    return N_("function with complex parameters cannot be inline");
  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return N_("inline functions not supported for this return value type");

  return 0;
}
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline_nocopy'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
/* Copy NODE (as with copy_node).  NODE must be a DECL.  Set the
   DECL_ABSTRACT_ORIGIN for the new node accordingly.  */

static tree
copy_and_set_decl_abstract_origin (node)
     tree node;
{
  tree copy = copy_node (node);
  if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
    /* That means that NODE already had a DECL_ABSTRACT_ORIGIN.  (This
       situation occurs if we inline a function which itself made
       calls to inline functions.)  Since DECL_ABSTRACT_ORIGIN is the
       most distant ancestor, we don't have to do anything here.  */
    ;
  else
    /* The most distant ancestor must be NODE.  */
    DECL_ABSTRACT_ORIGIN (copy) = node;

  return copy;
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;
  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	/* Record what interesting things happen to our parameters.  */
	note_stores (PATTERN (insn), note_modified_parmregs, NULL);
    }
  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  current_function->inl_max_label_num = max_label_num ();
  current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
  current_function->original_arg_vector = argvec;
  current_function->original_decl_initial = DECL_INITIAL (fndecl);
  DECL_SAVED_INSNS (fndecl) = current_function;

  /* Clean up.  */
  free (parmdecl_map);
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
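/* For example, FIXED_BASE_PLUS_P matches an rtx of the form

     (plus (reg virtual-stack-vars) (const_int 8))

   i.e. a constant offset from one of the virtual registers.  Such an
   address stays fixed for the whole function, so it is safe to record
   as a constant equivalence.  */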
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */

static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
/* Used by duplicate_eh_handlers to map labels for the exception table.  */
static struct inline_remap *eif_eh_map;

static rtx
expand_inline_function_eh_labelmap (label)
     rtx label;
{
  int index = CODE_LABEL_NUMBER (label);
  return get_label_from_map (eif_eh_map, index);
}
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;
  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));
  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }
  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_SET_IN_STRUCT_P (stack_slot,
			       AGGREGATE_TYPE_P (TREE_TYPE (arg)));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;
  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }
  /* Figure out where the blocks are if we're going to have to insert
     new BLOCKs into the existing block tree.  */
  if (current_function->x_whole_function_mode_p)
    find_loop_tree_blocks ();
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */
  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_FRAME_SIZE (fndecl) != 0)
	    copy_rtx_and_substitute (virtual_stack_vars_rtx, map, 0);

	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
    }
  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;
  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
				      CONST_AGE_PARM);
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings (0);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  bzero ((char *) &map->label_map[min_labelno],
	 (max_labelno - min_labelno) * sizeof (rtx));
  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* If the inline fn needs eh context, make sure that
	     the current fn has one.  */
	  if (GET_CODE (pattern) == USE
	      && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
	    get_eh_context ();
	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }
	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	      break;
	    }
	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	      break;
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */
#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn but whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */
	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.
	     Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
	     deleted these in the copy used for continuing compilation,
	     not the copy used for inlining).  */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
		{
		  rtx label
		    = get_label_from_map (map, NOTE_EH_HANDLER (copy));

		  /* We have to duplicate the handlers for the original.  */
		  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		    {
		      /* We need to duplicate the handlers for the EH region
			 and we need to indicate where the label map is.  */
		      eif_eh_map = map;
		      duplicate_eh_handlers (NOTE_EH_HANDLER (copy),
					     CODE_LABEL_NUMBER (label),
					     expand_inline_function_eh_labelmap);
		    }

		  /* We have to forward these both to match the new exception
		     region.  */
		  NOTE_EH_HANDLER (copy) = CODE_LABEL_NUMBER (label);
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	  break;
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&tem, NULL_RTX, map, 0);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }

  if (local_return_label)
    emit_label (local_return_label);
  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
				   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
  inline_function_decl = 0;

  if (current_function->x_whole_function_mode_p)
    /* Insert the block into the already existing block-tree.  */
    retrofit_block (block, map->insns_at_start);
  else
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode since
       the superblocks have not been created yet.  */
    insert_block (block);
  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);
  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      MEM_SET_IN_STRUCT_P (target, 1);
    }
  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
				       TREE_TYPE (tail));
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
      DECL_CONTEXT (decl) = current_function_decl;
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static tree
integrate_decl_tree (let, map)
     tree let;
     struct inline_remap *map;
{
  tree t;
  tree *next;
  tree new_block;

  new_block = make_node (BLOCK);
  next = &BLOCK_VARS (new_block);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_and_set_decl_abstract_origin (t);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map, 1);

	  /* Fully instantiate the address with the equivalent form so that the
	     debugging information contains the actual register, instead of the
	     virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map, 1);
	  apply_change_group ();
	}
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;

      if (DECL_LANG_SPECIFIC (d))
	copy_lang_decl (d);

      /* Set the context for the new declaration.  */
      if (!DECL_CONTEXT (t))
	/* Globals stay global.  */
	;
      else if (DECL_CONTEXT (t) != map->fndecl)
	/* Things that weren't in the scope of the function we're
	   inlining from aren't in the scope we're inlining to,
	   either.  */
	;
      else if (TREE_STATIC (t))
	/* Function-scoped static variables should stay in the original
	   function.  */
	;
      else
	/* Ordinary automatic local variables are now in the scope of
	   the new function.  */
	DECL_CONTEXT (d) = current_function_decl;

      /* Add this declaration to the list of variables in the new
	 block.  */
      *next = d;
      next = &TREE_CHAIN (d);
    }

  next = &BLOCK_SUBBLOCKS (new_block);
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    {
      *next = integrate_decl_tree (t, map);
      BLOCK_SUPERCONTEXT (*next) = new_block;
      next = &BLOCK_CHAIN (*next);
    }

  TREE_USED (new_block) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (new_block) = let;

  return new_block;
}
/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (orig, map, for_lhs)
     register rtx orig;
     struct inline_remap *map;
     int for_lhs;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER
	  || (map->integrating
	      && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));

	      start_sequence ();

#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif

	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
		   || (map->integrating
		       && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
			   == orig)))
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_SAVED_INSNS (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      /* This is a reference to the function return value.  If
		 the function doesn't have a return value, error.  If the
		 mode doesn't agree, and it ain't BLKmode, make a SUBREG.  */
	      if (map->inline_target == 0)
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	      else if (GET_MODE (map->inline_target) != BLKmode
		       && mode != GET_MODE (map->inline_target))
		return gen_lowpart (mode, map->inline_target);
	      else
		return map->inline_target;
	    }
	  return orig;
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
	return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
			       SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
	{
	  rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);

	  if (GET_MODE (retval) == GET_MODE (orig))
	    return retval;
	  else
	    return gen_rtx_SUBREG (GET_MODE (orig), retval,
				   (SUBREG_WORD (orig) %
				    (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
				     / (unsigned) UNITS_PER_WORD)));
	}
      else
	return gen_rtx_SUBREG (GET_MODE (orig), copy,
			       SUBREG_WORD (orig));
    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
				copy_rtx_and_substitute (XEXP (orig, 0),
							 map, for_lhs),
				0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
	regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
	{
	  temp = XEXP (orig, 0);
	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	  regno = REGNO (map->reg_map[regno]);
	}
      ADDRESSOF_REGNO (copy) = regno;
      return copy;
    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
	 inlined call will be referring to our nonlocal goto handler.
	 So make sure we create one for this block; we normally would
	 not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;
    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = inlining ? inlining : current_function;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
	  if (inlining)
	    {
	      rtx temp = force_const_mem (const_mode,
					  copy_rtx_and_substitute (constant,
								   map, 0));

	      /* Legitimizing the address here is incorrect.

		 Since we had a SYMBOL_REF before, we can assume it is valid
		 to have one in this position in the insn.

		 Also, change_address may create new registers.  These
		 registers will not have valid reg_map entries.  This can
		 cause try_constants() to fail because it assumes that all
		 registers in the rtx have valid reg_map entries, and it may
		 end up replacing one of these new registers with junk.  */

	      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
		temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));

	      temp = XEXP (temp, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) != GET_MODE (orig))
		temp = convert_memory_address (GET_MODE (orig), temp);
#endif
	      return temp;
	    }
	  else if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      if (SYMBOL_REF_NEED_ADJUST (orig))
	{
	  eif_eh_map = map;
	  return rethrow_symbol_map (orig,
				     expand_inline_function_eh_labelmap);
	}

      return orig;
    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
         want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
         duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);
    case CONST:
      /* Make new constant pool entry for a constant
         that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
        abort ();
      break;
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          copy->volatil = orig->volatil;
          XSTR (copy, 0) = XSTR (orig, 0);
          XSTR (copy, 1) = XSTR (orig, 1);
          XINT (copy, 2) = XINT (orig, 2);
          XVEC (copy, 3) = map->copy_asm_operands_vector;
          XVEC (copy, 4) = map->copy_asm_constraints_vector;
          XSTR (copy, 5) = XSTR (orig, 5);
          XINT (copy, 6) = XINT (orig, 6);
          return copy;
        }
      break;
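
      /* For example, an asm statement with two outputs, such as
             asm ("..." : "=r" (a), "=r" (b) : "r" (c));
         expands to a PARALLEL of SETs whose ASM_OPERANDS share a single
         operand vector; the copies made here must likewise share one new
         vector rather than each getting a private copy.  */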
    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        return gen_rtx_CALL
          (GET_MODE (orig),
           gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
                        copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
                                                 map, 0)),
           copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
      break;
#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
         Adjust the setting by the offset of the area we made.
         If the nonlocal goto is into the current function,
         this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
          || SET_DEST (orig) == virtual_incoming_args_rtx)
        {
          /* In case a translation hasn't occurred already, make one now.  */
          rtx equiv_reg;
          rtx equiv_loc;
          HOST_WIDE_INT loc_offset;

          copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
          equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
          equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                          REGNO (equiv_reg)).rtx;
          loc_offset
            = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

          return gen_rtx_SET (VOIDmode, SET_DEST (orig),
                              force_operand
                              (plus_constant
                               (copy_rtx_and_substitute (SET_SRC (orig),
                                                         map, 0),
                                loc_offset),
                               NULL_RTX));
        }
      else
        return gen_rtx_SET (VOIDmode,
                            copy_rtx_and_substitute (SET_DEST (orig), map, 1),
                            copy_rtx_and_substitute (SET_SRC (orig), map, 0));
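
      /* For illustration of the adjustment above: if the inlined
         function's virtual_stack_vars_rtx was given the equivalence
         (plus (reg fp) (const_int -64)) when its frame was laid out in
         the caller, loc_offset is -64 and the value being stored is
         offset by that amount.  (The offset is hypothetical.)  */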
    case MEM:
      if (inlining
          && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
        {
          enum machine_mode const_mode
            = get_pool_mode_for_function (inlining, XEXP (orig, 0));
          rtx constant
            = get_pool_constant_for_function (inlining, XEXP (orig, 0));

          constant = copy_rtx_and_substitute (constant, map, 0);

          /* If this was an address of a constant pool entry that itself
             had to be placed in the constant pool, it might not be a
             valid address.  So the recursive call might have turned it
             into a register.  In that case, it isn't a constant any
             more, so return it.  This has the potential of changing a
             MEM into a REG, but we'll assume that it is safe.  */
          if (! CONSTANT_P (constant))
            return constant;

          return validize_mem (force_const_mem (const_mode, constant));
        }

      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
      MEM_COPY_ATTRIBUTES (copy, orig);
      MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
      RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
      return copy;

    default:
      break;
    }
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          /* Copy this through the wide int field; that's safest.  */
          X0WINT (copy, i) = X0WINT (orig, i);
          break;

        case 'e':
          XEXP (copy, i)
            = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
          break;

        case 'u':
          /* Change any references to old insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
                                             map, for_lhs);
            }
          break;

        case 'w':
          XWINT (copy, i) = XWINT (orig, i);
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        case 't':
          XTREE (copy, i) = XTREE (orig, i);
          break;

        default:
          abort ();
        }
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
        {
          int regno = REGNO (map->equiv_sets[i].dest);

          MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
          if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
              /* Following clause is a hack to make things work when GNU C++
                 reassigns a variable to make cse work right.  */
              || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                                    regno).rtx,
                                map->equiv_sets[i].equiv))
            SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
                                  map->equiv_sets[i].equiv, map->const_age);
        }
      else if (map->equiv_sets[i].dest == pc_rtx)
        map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
        map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
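
/* For illustration of the two passes above: given the copied insn
       (set (mem (plus (reg 100) (reg 101))) (reg 102))
   where (reg 101) is known to equal (const_int 4) and (reg 102) is known
   to equal (const_int 0), the first (address-only) pass can fold the
   address to (plus (reg 100) (const_int 4)) even on a machine whose
   store patterns reject a constant source; the second pass then tries
   the source substitution separately, and apply_change_group keeps only
   the changes the insn's pattern accepts.  (Register numbers are
   hypothetical.)  */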
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the address of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (loc, insn, map, memonly)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
     int memonly;
{
  rtx x = *loc;
  register int i, j;
  register enum rtx_code code;
  register const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
        validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */
      if (! memonly)
        {
          int regno = REGNO (x);
          struct const_equiv_data *p;

          if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
              && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
              && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
                  p->rtx != 0)
              && p->age >= map->const_age)
            validate_change (insn, loc, p->rtx, 1);
        }
      return;
    case SUBREG:
      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
        {
          rtx inner = SUBREG_REG (x);
          rtx new = 0;

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
             see what is inside, try to form the new SUBREG and see if that is
             valid.  We handle two cases: extracting a full word in an
             integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map, 0);

          if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
              && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
              && GET_MODE (SUBREG_REG (x)) != VOIDmode)
            new = operand_subword (inner, SUBREG_WORD (x), 0,
                                   GET_MODE (SUBREG_REG (x)));

          cancel_changes (num_changes);
          if (new == 0 && subreg_lowpart_p (x))
            new = gen_lowpart_common (GET_MODE (x), inner);

          if (new)
            validate_change (insn, loc, new, 1);

          return;
        }
      break;
    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);
      return;
    case SET:
      {
        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the
           destination itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;
        rtx src, tem;

        subst_constants (&SET_SRC (x), insn, map, memonly);
        src = SET_SRC (x);

        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
          {
            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
              {
                subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
                subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
              }
            dest_loc = &XEXP (*dest_loc, 0);
          }

        /* Do substitute in the address of a destination in memory.  */
        if (GET_CODE (*dest_loc) == MEM)
          subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

        /* Check for the case of DEST a SUBREG, both it and the underlying
           register are less than one word, and the SUBREG has the wider mode.
           In that case, we are really setting the underlying register to the
           source converted to the mode of DEST.  So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);

        /* If storing a recognizable value save it for later recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (GET_CODE (src) == REG
                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
                || (GET_CODE (src) == PLUS
                    && GET_CODE (XEXP (src, 0)) == REG
                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
                || dest == cc0_rtx
#endif
                || (dest == pc_rtx
                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))
          {
            /* Normally, this copy won't do anything.  But, if SRC is a
               COMPARE it will cause us to save the COMPARE with any
               constants substituted, which is what we want for later.  */
            map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
            map->equiv_sets[map->num_sets++].dest = dest;
          }
      }
      return;

    default:
      break;
    }
  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case '0':
        break;

      case 'e':
        subst_constants (&XEXP (x, i), insn, map, memonly);
        break;

      case 'u':
      case 'i':
      case 's':
      case 'w':
      case 't':
        break;

      case 'E':
        if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
          for (j = 0; j < XVECLEN (x, i); j++)
            subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
        break;

      default:
        abort ();
      }
  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }
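
  /* For example, (plus:SI (const_int 3) (reg 65)) becomes
     (plus:SI (reg 65) (const_int 3)), the canonical operand order.
     (The register number is arbitrary.)  */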
  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case '1':
        if (op0_mode == MAX_MACHINE_MODE)
          abort ();
        new = simplify_unary_operation (code, GET_MODE (x),
                                        XEXP (x, 0), op0_mode);
        break;

      case '<':
        {
          enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

          if (op_mode == VOIDmode)
            op_mode = GET_MODE (XEXP (x, 1));
          new = simplify_relational_operation (code, op_mode,
                                               XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
          if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
            new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
                   : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
                                                   GET_MODE (x)));
#endif
          break;
        }

      case '2':
      case 'c':
        new = simplify_binary_operation (code, GET_MODE (x),
                                         XEXP (x, 0), XEXP (x, 1));
        break;

      case 'b':
      case '3':
        if (op0_mode == MAX_MACHINE_MODE)
          abort ();
        new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                          XEXP (x, 0), XEXP (x, 1),
                                          XEXP (x, 2));
        break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}
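
/* For illustration of the effect of subst_constants: if (reg 66) has the
   recorded equivalence (const_int 8), an occurrence of
       (plus:SI (reg 66) (const_int 4))
   is first rewritten to (plus:SI (const_int 8) (const_int 4)) and then
   folded by simplify_binary_operation to (const_int 12), provided the
   containing insn still matches its pattern.  (Register numbers are
   hypothetical.)  */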
/* Show that the registers modified no longer contain known constants.  We
   are called from note_stores with parts of the new insn.  */

static void
mark_stores (dest, x, data)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
                      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      register int i;

      /* Ignore virtual stack var or virtual arg register since those
         are handled separately.  */
      if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
          && regno != VIRTUAL_STACK_VARS_REGNUM)
        for (i = regno; i <= last_reg; i++)
          if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
            VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
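
/* For example, a store to (reg:DI 2) on a machine where DImode occupies
   two hard registers discards the recorded equivalences for hard regs 2
   and 3, since HARD_REGNO_NREGS (2, DImode) is 2 there.  (Register
   numbers are hypothetical.)  */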
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
        register tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);    /* Potential recursion.  */
      }

      {
        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);     /* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          register tree arg;

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE
              && DECL_INITIAL (decl) != error_mark_node)
            set_block_origin_self (DECL_INITIAL (decl));
        }
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}
/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  struct function *curf = current_function;
  struct function *f = DECL_SAVED_INSNS (fndecl);

  current_function = f;
  current_function_decl = fndecl;
  clear_emit_caches ();

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  set_new_last_label_num (f->inl_max_label_num);

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */
  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  f->inlinable = 0;
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function = curf;
  current_function_decl = curf ? curf->decl : 0;
}