/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "integrate.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
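
/* As a quick illustration of the macro above: with ALIGN == 16,
   CEIL_ROUND (40, 16) is 48 and CEIL_ROUND (48, 16) stays 48, since adding
   ALIGN - 1 and then masking off the low bits rounds VALUE up to the next
   multiple of the (power-of-two) alignment.  */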
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
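
/* For example (assuming the optimize_size test above), a two-argument
   function gets a default threshold of 1 + (3 * 2) / 2 = 4 insns when
   optimizing for size, and 8 * (8 + 2) = 80 insns otherwise.  */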
/* Decide whether a function with a target specific attribute
   attached can be inlined.  By default we disallow this.  */
#ifndef FUNCTION_ATTRIBUTE_INLINABLE_P
#define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) 0
#endif
static rtvec initialize_for_inline	PARAMS ((tree));
static void note_modified_parmregs	PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls	PARAMS ((tree, struct inline_remap *,
						 rtvec));
static tree integrate_decl_tree		PARAMS ((tree,
						 struct inline_remap *));
static void subst_constants		PARAMS ((rtx *, rtx,
						 struct inline_remap *, int));
static void set_block_origin_self	PARAMS ((tree));
static void set_block_abstract_flags	PARAMS ((tree, int));
static void process_reg_param		PARAMS ((struct inline_remap *, rtx,
						 rtx));
void set_decl_abstract_flags		PARAMS ((tree, int));
static void mark_stores			PARAMS ((rtx, rtx, void *));
static void save_parm_insns		PARAMS ((rtx, rtx));
static void copy_insn_list		PARAMS ((rtx, struct inline_remap *,
						 rtx));
static void copy_insn_notes		PARAMS ((rtx, struct inline_remap *,
						 int));
static int compare_blocks		PARAMS ((const PTR, const PTR));
static int find_block			PARAMS ((const PTR, const PTR));
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the FUNCTION_DECL for the inlined function.  */
static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     MAX_INLINE_INSNS (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (MAX_INLINE_INSNS
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
	       && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
	return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (DECL_MACHINE_ATTRIBUTES (fndecl)
      && ! FUNCTION_ATTRIBUTE_INLINABLE_P (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;
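
/* For instance, if a parameter `n' of the function being saved lives in
   pseudo register 53, parmdecl_map[53] is `n's PARM_DECL; slots for
   registers that are not a parameter's home remain zero.  */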
/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      if (DECL_LANG_SPECIFIC (copy))
	copy_lang_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining into, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;
  save_parm_insns (insn, first_nonparm_insn);

  cfun->inl_max_label_num = max_label_num ();
  cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
  cfun->original_arg_vector = argvec;
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  free (parmdecl_map);
}
/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (INSN_P (insn))
	{
	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion sequence.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
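
/* For example, an address such as
     (plus (reg virtual-stack-vars) (const_int 8))
   satisfies FIXED_BASE_PLUS_P, while a sum whose base is an ordinary
   pseudo register does not, since only virtual-register bases qualify.  */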
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */

static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);

  return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
	  - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);

  return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
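
/* Both comparison routines key on the address of a block's
   BLOCK_ABSTRACT_ORIGIN, so once the block_map varray has been qsort'ed
   with compare_blocks, a bsearch with find_block can take an original BLOCK
   and locate the remapped copy whose abstract origin it is.  */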
570 /* Integrate the procedure defined by FNDECL. Note that this function
571 may wind up calling itself. Since the static variables are not
572 reentrant, we do not assign them until after the possibility
573 of recursion is eliminated.
575 If IGNORE is nonzero, do not produce a value.
576 Otherwise store the value in TARGET if it is nonzero and that is convenient.
579 (rtx)-1 if we could not substitute the function
580 0 if we substituted it and it does not produce a value
581 else an rtx for where the value is stored. */
584 expand_inline_function (fndecl
, parms
, target
, ignore
, type
,
585 structure_value_addr
)
590 rtx structure_value_addr
;
592 struct function
*inlining_previous
;
593 struct function
*inl_f
= DECL_SAVED_INSNS (fndecl
);
594 tree formal
, actual
, block
;
595 rtx parm_insns
= inl_f
->emit
->x_first_insn
;
596 rtx insns
= (inl_f
->inl_last_parm_insn
597 ? NEXT_INSN (inl_f
->inl_last_parm_insn
)
603 int min_labelno
= inl_f
->emit
->x_first_label_num
;
604 int max_labelno
= inl_f
->inl_max_label_num
;
609 struct inline_remap
*map
= 0;
613 rtvec arg_vector
= (rtvec
) inl_f
->original_arg_vector
;
614 rtx static_chain_value
= 0;
616 int eh_region_offset
;
618 /* The pointer used to track the true location of the memory used
619 for MAP->LABEL_MAP. */
620 rtx
*real_label_map
= 0;
622 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
623 max_regno
= inl_f
->emit
->x_reg_rtx_no
+ 3;
624 if (max_regno
< FIRST_PSEUDO_REGISTER
)
627 /* Pull out the decl for the function definition; fndecl may be a
628 local declaration, which would break DECL_ABSTRACT_ORIGIN. */
629 fndecl
= inl_f
->decl
;
631 nargs
= list_length (DECL_ARGUMENTS (fndecl
));
633 if (cfun
->preferred_stack_boundary
< inl_f
->preferred_stack_boundary
)
634 cfun
->preferred_stack_boundary
= inl_f
->preferred_stack_boundary
;
636 /* Check that the parms type match and that sufficient arguments were
637 passed. Since the appropriate conversions or default promotions have
638 already been applied, the machine modes should match exactly. */
640 for (formal
= DECL_ARGUMENTS (fndecl
), actual
= parms
;
642 formal
= TREE_CHAIN (formal
), actual
= TREE_CHAIN (actual
))
645 enum machine_mode mode
;
648 return (rtx
) (HOST_WIDE_INT
) -1;
650 arg
= TREE_VALUE (actual
);
651 mode
= TYPE_MODE (DECL_ARG_TYPE (formal
));
653 if (mode
!= TYPE_MODE (TREE_TYPE (arg
))
654 /* If they are block mode, the types should match exactly.
655 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
656 which could happen if the parameter has incomplete type. */
658 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg
))
659 != TYPE_MAIN_VARIANT (TREE_TYPE (formal
)))))
660 return (rtx
) (HOST_WIDE_INT
) -1;
663 /* Extra arguments are valid, but will be ignored below, so we must
664 evaluate them here for side-effects. */
665 for (; actual
; actual
= TREE_CHAIN (actual
))
666 expand_expr (TREE_VALUE (actual
), const0_rtx
,
667 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual
))), 0);
669 /* Expand the function arguments. Do this first so that any
670 new registers get created before we allocate the maps. */
672 arg_vals
= (rtx
*) xmalloc (nargs
* sizeof (rtx
));
673 arg_trees
= (tree
*) xmalloc (nargs
* sizeof (tree
));
675 for (formal
= DECL_ARGUMENTS (fndecl
), actual
= parms
, i
= 0;
677 formal
= TREE_CHAIN (formal
), actual
= TREE_CHAIN (actual
), i
++)
679 /* Actual parameter, converted to the type of the argument within the
681 tree arg
= convert (TREE_TYPE (formal
), TREE_VALUE (actual
));
682 /* Mode of the variable used within the function. */
683 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (formal
));
687 loc
= RTVEC_ELT (arg_vector
, i
);
689 /* If this is an object passed by invisible reference, we copy the
690 object into a stack slot and save its address. If this will go
691 into memory, we do nothing now. Otherwise, we just expand the
693 if (GET_CODE (loc
) == MEM
&& GET_CODE (XEXP (loc
, 0)) == REG
694 && REGNO (XEXP (loc
, 0)) > LAST_VIRTUAL_REGISTER
)
696 rtx stack_slot
= assign_temp (TREE_TYPE (arg
), 1, 1, 1);
698 store_expr (arg
, stack_slot
, 0);
699 arg_vals
[i
] = XEXP (stack_slot
, 0);
702 else if (GET_CODE (loc
) != MEM
)
704 if (GET_MODE (loc
) != TYPE_MODE (TREE_TYPE (arg
)))
705 /* The mode of LOC and ARG can differ if LOC was a variable
706 that had its mode promoted via PROMOTED_MODE. */
707 arg_vals
[i
] = convert_modes (GET_MODE (loc
),
708 TYPE_MODE (TREE_TYPE (arg
)),
709 expand_expr (arg
, NULL_RTX
, mode
,
711 TREE_UNSIGNED (TREE_TYPE (formal
)));
713 arg_vals
[i
] = expand_expr (arg
, NULL_RTX
, mode
, EXPAND_SUM
);
719 && (! TREE_READONLY (formal
)
720 /* If the parameter is not read-only, copy our argument through
721 a register. Also, we cannot use ARG_VALS[I] if it overlaps
722 TARGET in any way. In the inline function, they will likely
723 be two different pseudos, and `safe_from_p' will make all
724 sorts of smart assumptions about their not conflicting.
725 But if ARG_VALS[I] overlaps TARGET, these assumptions are
726 wrong, so put ARG_VALS[I] into a fresh register.
727 Don't worry about invisible references, since their stack
728 temps will never overlap the target. */
731 && (GET_CODE (arg_vals
[i
]) == REG
732 || GET_CODE (arg_vals
[i
]) == SUBREG
733 || GET_CODE (arg_vals
[i
]) == MEM
)
734 && reg_overlap_mentioned_p (arg_vals
[i
], target
))
735 /* ??? We must always copy a SUBREG into a REG, because it might
736 get substituted into an address, and not all ports correctly
737 handle SUBREGs in addresses. */
738 || (GET_CODE (arg_vals
[i
]) == SUBREG
)))
739 arg_vals
[i
] = copy_to_mode_reg (GET_MODE (loc
), arg_vals
[i
]);
741 if (arg_vals
[i
] != 0 && GET_CODE (arg_vals
[i
]) == REG
742 && POINTER_TYPE_P (TREE_TYPE (formal
)))
743 mark_reg_pointer (arg_vals
[i
],
744 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal
))));
747 /* Allocate the structures we use to remap things. */
749 map
= (struct inline_remap
*) xcalloc (1, sizeof (struct inline_remap
));
750 map
->fndecl
= fndecl
;
752 VARRAY_TREE_INIT (map
->block_map
, 10, "block_map");
753 map
->reg_map
= (rtx
*) xcalloc (max_regno
, sizeof (rtx
));
755 /* We used to use alloca here, but the size of what it would try to
756 allocate would occasionally cause it to exceed the stack limit and
757 cause unpredictable core dumps. */
759 = (rtx
*) xmalloc ((max_labelno
) * sizeof (rtx
));
760 map
->label_map
= real_label_map
;
761 map
->local_return_label
= NULL_RTX
;
763 inl_max_uid
= (inl_f
->emit
->x_cur_insn_uid
+ 1);
764 map
->insn_map
= (rtx
*) xcalloc (inl_max_uid
, sizeof (rtx
));
766 map
->max_insnno
= inl_max_uid
;
768 map
->integrating
= 1;
769 map
->compare_src
= NULL_RTX
;
770 map
->compare_mode
= VOIDmode
;
772 /* const_equiv_varray maps pseudos in our routine to constants, so
773 it needs to be large enough for all our pseudos. This is the
774 number we are currently using plus the number in the called
775 routine, plus 15 for each arg, five to compute the virtual frame
776 pointer, and five for the return value. This should be enough
777 for most cases. We do not reference entries outside the range of
780 ??? These numbers are quite arbitrary and were obtained by
781 experimentation. At some point, we should try to allocate the
782 table after all the parameters are set up so we can more accurately
783 estimate the number of pseudos we will need. */
785 VARRAY_CONST_EQUIV_INIT (map
->const_equiv_varray
,
787 + (max_regno
- FIRST_PSEUDO_REGISTER
)
790 "expand_inline_function");
793 /* Record the current insn in case we have to set up pointers to frame
794 and argument memory blocks. If there are no insns yet, add a dummy
795 insn that can be used as an insertion point. */
796 map
->insns_at_start
= get_last_insn ();
797 if (map
->insns_at_start
== 0)
798 map
->insns_at_start
= emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
800 map
->regno_pointer_align
= inl_f
->emit
->regno_pointer_align
;
801 map
->x_regno_reg_rtx
= inl_f
->emit
->x_regno_reg_rtx
;
803 /* Update the outgoing argument size to allow for those in the inlined
805 if (inl_f
->outgoing_args_size
> current_function_outgoing_args_size
)
806 current_function_outgoing_args_size
= inl_f
->outgoing_args_size
;
808 /* If the inline function needs to make PIC references, that means
809 that this function's PIC offset table must be used. */
810 if (inl_f
->uses_pic_offset_table
)
811 current_function_uses_pic_offset_table
= 1;
813 /* If this function needs a context, set it up. */
814 if (inl_f
->needs_context
)
815 static_chain_value
= lookup_static_chain (fndecl
);
817 if (GET_CODE (parm_insns
) == NOTE
818 && NOTE_LINE_NUMBER (parm_insns
) > 0)
820 rtx note
= emit_note (NOTE_SOURCE_FILE (parm_insns
),
821 NOTE_LINE_NUMBER (parm_insns
));
823 RTX_INTEGRATED_P (note
) = 1;
826 /* Process each argument. For each, set up things so that the function's
827 reference to the argument will refer to the argument being passed.
828 We only replace REG with REG here. Any simplifications are done
831 We make two passes: In the first, we deal with parameters that will
832 be placed into registers, since we need to ensure that the allocated
833 register number fits in const_equiv_map. Then we store all non-register
834 parameters into their memory location. */
836 /* Don't try to free temp stack slots here, because we may put one of the
837 parameters into a temp stack slot. */
839 for (i
= 0; i
< nargs
; i
++)
841 rtx copy
= arg_vals
[i
];
843 loc
= RTVEC_ELT (arg_vector
, i
);
845 /* There are three cases, each handled separately. */
846 if (GET_CODE (loc
) == MEM
&& GET_CODE (XEXP (loc
, 0)) == REG
847 && REGNO (XEXP (loc
, 0)) > LAST_VIRTUAL_REGISTER
)
849 /* This must be an object passed by invisible reference (it could
850 also be a variable-sized object, but we forbid inlining functions
851 with variable-sized arguments). COPY is the address of the
852 actual value (this computation will cause it to be copied). We
853 map that address for the register, noting the actual address as
854 an equivalent in case it can be substituted into the insns. */
856 if (GET_CODE (copy
) != REG
)
858 temp
= copy_addr_to_reg (copy
);
859 if (CONSTANT_P (copy
) || FIXED_BASE_PLUS_P (copy
))
860 SET_CONST_EQUIV_DATA (map
, temp
, copy
, CONST_AGE_PARM
);
863 map
->reg_map
[REGNO (XEXP (loc
, 0))] = copy
;
865 else if (GET_CODE (loc
) == MEM
)
867 /* This is the case of a parameter that lives in memory. It
868 will live in the block we allocate in the called routine's
869 frame that simulates the incoming argument area. Do nothing
870 with the parameter now; we will call store_expr later. In
871 this case, however, we must ensure that the virtual stack and
872 incoming arg rtx values are expanded now so that we can be
873 sure we have enough slots in the const equiv map since the
874 store_expr call can easily blow the size estimate. */
875 if (DECL_SAVED_INSNS (fndecl
)->args_size
!= 0)
876 copy_rtx_and_substitute (virtual_incoming_args_rtx
, map
, 0);
878 else if (GET_CODE (loc
) == REG
)
879 process_reg_param (map
, loc
, copy
);
880 else if (GET_CODE (loc
) == CONCAT
)
882 rtx locreal
= gen_realpart (GET_MODE (XEXP (loc
, 0)), loc
);
883 rtx locimag
= gen_imagpart (GET_MODE (XEXP (loc
, 0)), loc
);
884 rtx copyreal
= gen_realpart (GET_MODE (locreal
), copy
);
885 rtx copyimag
= gen_imagpart (GET_MODE (locimag
), copy
);
887 process_reg_param (map
, locreal
, copyreal
);
888 process_reg_param (map
, locimag
, copyimag
);
894 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
895 specially. This function can be called recursively, so we need to
896 save the previous value. */
897 inlining_previous
= inlining
;
900 /* Now do the parameters that will be placed in memory. */
902 for (formal
= DECL_ARGUMENTS (fndecl
), i
= 0;
903 formal
; formal
= TREE_CHAIN (formal
), i
++)
905 loc
= RTVEC_ELT (arg_vector
, i
);
907 if (GET_CODE (loc
) == MEM
908 /* Exclude case handled above. */
909 && ! (GET_CODE (XEXP (loc
, 0)) == REG
910 && REGNO (XEXP (loc
, 0)) > LAST_VIRTUAL_REGISTER
))
912 rtx note
= emit_note (DECL_SOURCE_FILE (formal
),
913 DECL_SOURCE_LINE (formal
));
915 RTX_INTEGRATED_P (note
) = 1;
917 /* Compute the address in the area we reserved and store the
919 temp
= copy_rtx_and_substitute (loc
, map
, 1);
920 subst_constants (&temp
, NULL_RTX
, map
, 1);
921 apply_change_group ();
922 if (! memory_address_p (GET_MODE (temp
), XEXP (temp
, 0)))
923 temp
= change_address (temp
, VOIDmode
, XEXP (temp
, 0));
924 store_expr (arg_trees
[i
], temp
, 0);
928 /* Deal with the places that the function puts its result.
929 We are driven by what is placed into DECL_RESULT.
931 Initially, we assume that we don't have anything special handling for
932 REG_FUNCTION_RETURN_VALUE_P. */
934 map
->inline_target
= 0;
935 loc
= (DECL_RTL_SET_P (DECL_RESULT (fndecl
))
936 ? DECL_RTL (DECL_RESULT (fndecl
)) : NULL_RTX
);
938 if (TYPE_MODE (type
) == VOIDmode
)
939 /* There is no return value to worry about. */
941 else if (GET_CODE (loc
) == MEM
)
943 if (GET_CODE (XEXP (loc
, 0)) == ADDRESSOF
)
945 temp
= copy_rtx_and_substitute (loc
, map
, 1);
946 subst_constants (&temp
, NULL_RTX
, map
, 1);
947 apply_change_group ();
952 if (! structure_value_addr
953 || ! aggregate_value_p (DECL_RESULT (fndecl
)))
956 /* Pass the function the address in which to return a structure
957 value. Note that a constructor can cause someone to call us
958 with STRUCTURE_VALUE_ADDR, but the initialization takes place
959 via the first parameter, rather than the struct return address.
961 We have two cases: If the address is a simple register
962 indirect, use the mapping mechanism to point that register to
963 our structure return address. Otherwise, store the structure
964 return value into the place that it will be referenced from. */
966 if (GET_CODE (XEXP (loc
, 0)) == REG
)
968 temp
= force_operand (structure_value_addr
, NULL_RTX
);
969 temp
= force_reg (Pmode
, temp
);
970 /* A virtual register might be invalid in an insn, because
971 it can cause trouble in reload. Since we don't have access
972 to the expanders at map translation time, make sure we have
973 a proper register now.
974 If a virtual register is actually valid, cse or combine
975 can put it into the mapped insns. */
976 if (REGNO (temp
) >= FIRST_VIRTUAL_REGISTER
977 && REGNO (temp
) <= LAST_VIRTUAL_REGISTER
)
978 temp
= copy_to_mode_reg (Pmode
, temp
);
979 map
->reg_map
[REGNO (XEXP (loc
, 0))] = temp
;
981 if (CONSTANT_P (structure_value_addr
)
982 || GET_CODE (structure_value_addr
) == ADDRESSOF
983 || (GET_CODE (structure_value_addr
) == PLUS
984 && (XEXP (structure_value_addr
, 0)
985 == virtual_stack_vars_rtx
)
986 && (GET_CODE (XEXP (structure_value_addr
, 1))
989 SET_CONST_EQUIV_DATA (map
, temp
, structure_value_addr
,
995 temp
= copy_rtx_and_substitute (loc
, map
, 1);
996 subst_constants (&temp
, NULL_RTX
, map
, 0);
997 apply_change_group ();
998 emit_move_insn (temp
, structure_value_addr
);
1003 /* We will ignore the result value, so don't look at its structure.
1004 Note that preparations for an aggregate return value
1005 do need to be made (above) even if it will be ignored. */
1007 else if (GET_CODE (loc
) == REG
)
1009 /* The function returns an object in a register and we use the return
1010 value. Set up our target for remapping. */
1012 /* Machine mode the function was declared to return. */
1013 enum machine_mode departing_mode
= TYPE_MODE (type
);
1014 /* (Possibly wider) machine mode it actually computes
1015 (for the sake of callers that fail to declare it right).
1016 We have to use the mode of the result's RTL, rather than
1017 its type, since expand_function_start may have promoted it. */
1018 enum machine_mode arriving_mode
1019 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl
)));
1022 /* Don't use MEMs as direct targets because on some machines
1023 substituting a MEM for a REG makes invalid insns.
1024 Let the combiner substitute the MEM if that is valid. */
1025 if (target
== 0 || GET_CODE (target
) != REG
1026 || GET_MODE (target
) != departing_mode
)
1028 /* Don't make BLKmode registers. If this looks like
1029 a BLKmode object being returned in a register, get
1030 the mode from that, otherwise abort. */
1031 if (departing_mode
== BLKmode
)
1033 if (REG
== GET_CODE (DECL_RTL (DECL_RESULT (fndecl
))))
1035 departing_mode
= GET_MODE (DECL_RTL (DECL_RESULT (fndecl
)));
1036 arriving_mode
= departing_mode
;
1042 target
= gen_reg_rtx (departing_mode
);
1045 /* If function's value was promoted before return,
1046 avoid machine mode mismatch when we substitute INLINE_TARGET.
1047 But TARGET is what we will return to the caller. */
1048 if (arriving_mode
!= departing_mode
)
1050 /* Avoid creating a paradoxical subreg wider than
1051 BITS_PER_WORD, since that is illegal. */
1052 if (GET_MODE_BITSIZE (arriving_mode
) > BITS_PER_WORD
)
1054 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode
),
1055 GET_MODE_BITSIZE (arriving_mode
)))
1056 /* Maybe could be handled by using convert_move () ? */
1058 reg_to_map
= gen_reg_rtx (arriving_mode
);
1059 target
= gen_lowpart (departing_mode
, reg_to_map
);
1062 reg_to_map
= gen_rtx_SUBREG (arriving_mode
, target
, 0);
1065 reg_to_map
= target
;
1067 /* Usually, the result value is the machine's return register.
1068 Sometimes it may be a pseudo. Handle both cases. */
1069 if (REG_FUNCTION_VALUE_P (loc
))
1070 map
->inline_target
= reg_to_map
;
1072 map
->reg_map
[REGNO (loc
)] = reg_to_map
;
1077 /* Remap the exception handler data pointer from one to the other. */
1078 temp
= get_exception_pointer (inl_f
);
1080 map
->reg_map
[REGNO (temp
)] = get_exception_pointer (cfun
);
1082 /* Initialize label_map. get_label_from_map will actually make
1084 memset ((char *) &map
->label_map
[min_labelno
], 0,
1085 (max_labelno
- min_labelno
) * sizeof (rtx
));
1087 /* Make copies of the decls of the symbols in the inline function, so that
1088 the copies of the variables get declared in the current function. Set
1089 up things so that lookup_static_chain knows that to interpret registers
1090 in SAVE_EXPRs for TYPE_SIZEs as local. */
1091 inline_function_decl
= fndecl
;
1092 integrate_parm_decls (DECL_ARGUMENTS (fndecl
), map
, arg_vector
);
1093 block
= integrate_decl_tree (inl_f
->original_decl_initial
, map
);
1094 BLOCK_ABSTRACT_ORIGIN (block
) = DECL_ORIGIN (fndecl
);
1095 inline_function_decl
= 0;
1097 /* Make a fresh binding contour that we can easily remove. Do this after
1098 expanding our arguments so cleanups are properly scoped. */
1099 expand_start_bindings_and_block (0, block
);
1101 /* Sort the block-map so that it will be easy to find remapped
1103 qsort (&VARRAY_TREE (map
->block_map
, 0),
1104 map
->block_map
->elements_used
,
1108 /* Perform postincrements before actually calling the function. */
1111 /* Clean up stack so that variables might have smaller offsets. */
1112 do_pending_stack_adjust ();
1114 /* Save a copy of the location of const_equiv_varray for
1115 mark_stores, called via note_stores. */
1116 global_const_equiv_varray
= map
->const_equiv_varray
;
1118 /* If the called function does an alloca, save and restore the
1119 stack pointer around the call. This saves stack space, but
1120 also is required if this inline is being done between two
1122 if (inl_f
->calls_alloca
)
1123 emit_stack_save (SAVE_BLOCK
, &stack_save
, NULL_RTX
);
1125 /* Now copy the insns one by one. */
1126 copy_insn_list (insns
, map
, static_chain_value
);
1128 /* Duplicate the EH regions. This will create an offset from the
1129 region numbers in the function we're inlining to the region
1130 numbers in the calling function. This must wait until after
1131 copy_insn_list, as we need the insn map to be complete. */
1132 eh_region_offset
= duplicate_eh_regions (inl_f
, map
);
1134 /* Now copy the REG_NOTES for those insns. */
1135 copy_insn_notes (insns
, map
, eh_region_offset
);
1137 /* If the insn sequence required one, emit the return label. */
1138 if (map
->local_return_label
)
1139 emit_label (map
->local_return_label
);
1141 /* Restore the stack pointer if we saved it above. */
1142 if (inl_f
->calls_alloca
)
1143 emit_stack_restore (SAVE_BLOCK
, stack_save
, NULL_RTX
);
1145 if (! cfun
->x_whole_function_mode_p
)
1146 /* In statement-at-a-time mode, we just tell the front-end to add
1147 this block to the list of blocks at this binding level. We
1148 can't do it the way it's done for function-at-a-time mode because the
1149 superblocks have not been created yet. */
1150 insert_block (block
);
1154 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl
));
1155 BLOCK_CHAIN (DECL_INITIAL (current_function_decl
)) = block
;
1158 /* End the scope containing the copied formal parameter variables
1159 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1160 here so that expand_end_bindings will not check for unused
1161 variables. That's already been checked for when the inlined
1162 function was defined. */
1163 expand_end_bindings (NULL_TREE
, 1, 1);
1165 /* Must mark the line number note after inlined functions as a repeat, so
1166 that the test coverage code can avoid counting the call twice. This
1167 just tells the code to ignore the immediately following line note, since
1168 there already exists a copy of this note before the expanded inline call.
1169 This line number note is still needed for debugging though, so we can't
1171 if (flag_test_coverage
)
1172 emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER
);
1174 emit_line_note (input_filename
, lineno
);
1176 /* If the function returns a BLKmode object in a register, copy it
1177 out of the temp register into a BLKmode memory object. */
1179 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl
))) == BLKmode
1180 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl
))))
1181 target
= copy_blkmode_from_reg (0, target
, TREE_TYPE (TREE_TYPE (fndecl
)));
1183 if (structure_value_addr
)
1185 target
= gen_rtx_MEM (TYPE_MODE (type
),
1186 memory_address (TYPE_MODE (type
),
1187 structure_value_addr
));
1188 set_mem_attributes (target
, type
, 1);
1191 /* Make sure we free the things we explicitly allocated with xmalloc. */
1193 free (real_label_map
);
1194 VARRAY_FREE (map
->const_equiv_varray
);
1195 free (map
->reg_map
);
1196 VARRAY_FREE (map
->block_map
);
1197 free (map
->insn_map
);
1202 inlining
= inlining_previous
;
1207 /* Make copies of each insn in the given list using the mapping
1208 computed in expand_inline_function. This function may call itself for
1209 insns containing sequences.
1211 Copying is done in two passes, first the insns and then their REG_NOTES.
1213 If static_chain_value is non-zero, it represents the context-pointer
1214 register for the function. */
1217 copy_insn_list (insns
, map
, static_chain_value
)
1219 struct inline_remap
*map
;
1220 rtx static_chain_value
;
1229 /* Copy the insns one by one. Do this in two passes, first the insns and
1230 then their REG_NOTES. */
1232 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1234 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1236 rtx copy
, pattern
, set
;
1238 map
->orig_asm_operands_vector
= 0;
1240 switch (GET_CODE (insn
))
1243 pattern
= PATTERN (insn
);
1244 set
= single_set (insn
);
1246 if (GET_CODE (pattern
) == USE
1247 && GET_CODE (XEXP (pattern
, 0)) == REG
1248 && REG_FUNCTION_VALUE_P (XEXP (pattern
, 0)))
1249 /* The (USE (REG n)) at return from the function should
1250 be ignored since we are changing (REG n) into
1254 /* Ignore setting a function value that we don't want to use. */
1255 if (map
->inline_target
== 0
1257 && GET_CODE (SET_DEST (set
)) == REG
1258 && REG_FUNCTION_VALUE_P (SET_DEST (set
)))
1260 if (volatile_refs_p (SET_SRC (set
)))
1264 /* If we must not delete the source,
1265 load it into a new temporary. */
1266 copy
= emit_insn (copy_rtx_and_substitute (pattern
, map
, 0));
1268 new_set
= single_set (copy
);
1273 = gen_reg_rtx (GET_MODE (SET_DEST (new_set
)));
1275 /* If the source and destination are the same and it
1276 has a note on it, keep the insn. */
1277 else if (rtx_equal_p (SET_DEST (set
), SET_SRC (set
))
1278 && REG_NOTES (insn
) != 0)
1279 copy
= emit_insn (copy_rtx_and_substitute (pattern
, map
, 0));
1284 /* Similarly if an ignored return value is clobbered. */
1285 else if (map
->inline_target
== 0
1286 && GET_CODE (pattern
) == CLOBBER
1287 && GET_CODE (XEXP (pattern
, 0)) == REG
1288 && REG_FUNCTION_VALUE_P (XEXP (pattern
, 0)))
1291 /* If this is setting the static chain rtx, omit it. */
1292 else if (static_chain_value
!= 0
1294 && GET_CODE (SET_DEST (set
)) == REG
1295 && rtx_equal_p (SET_DEST (set
),
1296 static_chain_incoming_rtx
))
1299 /* If this is setting the static chain pseudo, set it from
1300 the value we want to give it instead. */
1301 else if (static_chain_value
!= 0
1303 && rtx_equal_p (SET_SRC (set
),
1304 static_chain_incoming_rtx
))
1306 rtx newdest
= copy_rtx_and_substitute (SET_DEST (set
), map
, 1);
1308 copy
= emit_move_insn (newdest
, static_chain_value
);
1309 static_chain_value
= 0;
1312 /* If this is setting the virtual stack vars register, this must
1313 be the code at the handler for a builtin longjmp. The value
1314 saved in the setjmp buffer will be the address of the frame
1315 we've made for this inlined instance within our frame. But we
1316 know the offset of that value so we can use it to reconstruct
1317 our virtual stack vars register from that value. If we are
1318 copying it from the stack pointer, leave it unchanged. */
1320 && rtx_equal_p (SET_DEST (set
), virtual_stack_vars_rtx
))
1322 HOST_WIDE_INT offset
;
1323 temp
= map
->reg_map
[REGNO (SET_DEST (set
))];
1324 temp
= VARRAY_CONST_EQUIV (map
->const_equiv_varray
,
1327 if (rtx_equal_p (temp
, virtual_stack_vars_rtx
))
1329 else if (GET_CODE (temp
) == PLUS
1330 && rtx_equal_p (XEXP (temp
, 0), virtual_stack_vars_rtx
)
1331 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
)
1332 offset
= INTVAL (XEXP (temp
, 1));
1336 if (rtx_equal_p (SET_SRC (set
), stack_pointer_rtx
))
1337 temp
= SET_SRC (set
);
1339 temp
= force_operand (plus_constant (SET_SRC (set
),
1343 copy
= emit_move_insn (virtual_stack_vars_rtx
, temp
);
1347 copy
= emit_insn (copy_rtx_and_substitute (pattern
, map
, 0));
1348 /* REG_NOTES will be copied later. */
1351 /* If this insn is setting CC0, it may need to look at
1352 the insn that uses CC0 to see what type of insn it is.
1353 In that case, the call to recog via validate_change will
1354 fail. So don't substitute constants here. Instead,
1355 do it when we emit the following insn.
1357 For example, see the pyr.md file. That machine has signed and
1358 unsigned compares. The compare patterns must check the
1359 following branch insn to see what kind of compare to
1362 If the previous insn set CC0, substitute constants on it as
1364 if (sets_cc0_p (PATTERN (copy
)) != 0)
1369 try_constants (cc0_insn
, map
);
1371 try_constants (copy
, map
);
1374 try_constants (copy
, map
);
1379 if (map
->integrating
&& returnjump_p (insn
))
1381 if (map
->local_return_label
== 0)
1382 map
->local_return_label
= gen_label_rtx ();
1383 pattern
= gen_jump (map
->local_return_label
);
1386 pattern
= copy_rtx_and_substitute (PATTERN (insn
), map
, 0);
1388 copy
= emit_jump_insn (pattern
);
1392 try_constants (cc0_insn
, map
);
1395 try_constants (copy
, map
);
1397 /* If this used to be a conditional jump insn whose branch
1398 direction is now known, we must do something special. */
1399 if (any_condjump_p (insn
) && onlyjump_p (insn
) && map
->last_pc_value
)
1402 /* If the previous insn set cc0 for us, delete it. */
1403 if (sets_cc0_p (PREV_INSN (copy
)))
1404 delete_insn (PREV_INSN (copy
));
1407 /* If this is now a no-op, delete it. */
1408 if (map
->last_pc_value
== pc_rtx
)
1414 /* Otherwise, this is an unconditional jump so we must put a
1415 BARRIER after it. We could do some dead code elimination
1416 here, but jump.c will do it just as well. */
1422 /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1423 three attached sequences: normal call, sibling call and tail
1425 if (GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
1430 for (i
= 0; i
< 3; i
++)
1434 sequence
[i
] = NULL_RTX
;
1435 seq
= XEXP (PATTERN (insn
), i
);
1439 copy_insn_list (seq
, map
, static_chain_value
);
1440 sequence
[i
] = get_insns ();
1445 /* Find the new tail recursion label.
1446 It will already be substituted into sequence[2]. */
1447 tail_label
= copy_rtx_and_substitute (XEXP (PATTERN (insn
), 3),
1450 copy
= emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode
,
1458 pattern
= copy_rtx_and_substitute (PATTERN (insn
), map
, 0);
1459 copy
= emit_call_insn (pattern
);
1461 SIBLING_CALL_P (copy
) = SIBLING_CALL_P (insn
);
1462 CONST_CALL_P (copy
) = CONST_CALL_P (insn
);
1464 /* Because the USAGE information potentially contains objects other
1465 than hard registers, we need to copy it. */
1467 CALL_INSN_FUNCTION_USAGE (copy
)
1468 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn
),
1473 try_constants (cc0_insn
, map
);
1476 try_constants (copy
, map
);
1478 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1479 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1480 VARRAY_CONST_EQUIV (map
->const_equiv_varray
, i
).rtx
= 0;
1484 copy
= emit_label (get_label_from_map (map
,
1485 CODE_LABEL_NUMBER (insn
)));
1486 LABEL_NAME (copy
) = LABEL_NAME (insn
);
1491 copy
= emit_barrier ();
1495 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1496 discarded because it is important to have only one of
1497 each in the current function.
1499 NOTE_INSN_DELETED notes aren't useful.
1501 NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
1502 pointer (which will soon be dangling) confuses flow's
1503 attempts to preserve bb structures during the compilation
1506 if (NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
1507 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_BEG
1508 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_DELETED
1509 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_BASIC_BLOCK
)
1511 copy
= emit_note (NOTE_SOURCE_FILE (insn
),
1512 NOTE_LINE_NUMBER (insn
));
1514 && (NOTE_LINE_NUMBER (copy
) == NOTE_INSN_BLOCK_BEG
1515 || NOTE_LINE_NUMBER (copy
) == NOTE_INSN_BLOCK_END
)
1516 && NOTE_BLOCK (insn
))
1518 tree
*mapped_block_p
;
1521 = (tree
*) bsearch (NOTE_BLOCK (insn
),
1522 &VARRAY_TREE (map
->block_map
, 0),
1523 map
->block_map
->elements_used
,
1527 if (!mapped_block_p
)
1530 NOTE_BLOCK (copy
) = *mapped_block_p
;
1533 && NOTE_LINE_NUMBER (copy
) == NOTE_INSN_EXPECTED_VALUE
)
1534 NOTE_EXPECTED_VALUE (copy
)
1535 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn
),
1547 RTX_INTEGRATED_P (copy
) = 1;
1549 map
->insn_map
[INSN_UID (insn
)] = copy
;
1553 /* Copy the REG_NOTES. Increment const_age, so that only constants
1554 from parameters can be substituted in. These are the only ones
1555 that are valid across the entire function. */
1558 copy_insn_notes (insns
, map
, eh_region_offset
)
1560 struct inline_remap
*map
;
1561 int eh_region_offset
;
1566 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1568 if (! INSN_P (insn
))
1571 new_insn
= map
->insn_map
[INSN_UID (insn
)];
1575 if (REG_NOTES (insn
))
1577 rtx next
, note
= copy_rtx_and_substitute (REG_NOTES (insn
), map
, 0);
1579 /* We must also do subst_constants, in case one of our parameters
1580 has const type and constant value. */
1581 subst_constants (¬e
, NULL_RTX
, map
, 0);
1582 apply_change_group ();
1583 REG_NOTES (new_insn
) = note
;
1585 /* Delete any REG_LABEL notes from the chain. Remap any
1586 REG_EH_REGION notes. */
1587 for (; note
; note
= next
)
1589 next
= XEXP (note
, 1);
1590 if (REG_NOTE_KIND (note
) == REG_LABEL
)
1591 remove_note (new_insn
, note
);
1592 else if (REG_NOTE_KIND (note
) == REG_EH_REGION
)
1593 XEXP (note
, 0) = GEN_INT (INTVAL (XEXP (note
, 0))
1594 + eh_region_offset
);
1598 if (GET_CODE (insn
) == CALL_INSN
1599 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
1602 for (i
= 0; i
< 3; i
++)
1603 copy_insn_notes (XEXP (PATTERN (insn
), i
), map
, eh_region_offset
);
1606 if (GET_CODE (insn
) == JUMP_INSN
1607 && GET_CODE (PATTERN (insn
)) == RESX
)
1608 XINT (PATTERN (new_insn
), 0) += eh_region_offset
;
1612 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1613 push all of those decls and give each one the corresponding home. */
1616 integrate_parm_decls (args
, map
, arg_vector
)
1618 struct inline_remap
*map
;
1624 for (tail
= args
, i
= 0; tail
; tail
= TREE_CHAIN (tail
), i
++)
1626 tree decl
= copy_decl_for_inlining (tail
, map
->fndecl
,
1627 current_function_decl
);
1629 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector
, i
), map
, 1);
1631 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1632 here, but that's going to require some more work. */
1633 /* DECL_INCOMING_RTL (decl) = ?; */
1634 /* Fully instantiate the address with the equivalent form so that the
1635 debugging information contains the actual register, instead of the
1636 virtual register. Do this by not passing an insn to
1638 subst_constants (&new_decl_rtl
, NULL_RTX
, map
, 1);
1639 apply_change_group ();
1640 SET_DECL_RTL (decl
, new_decl_rtl
);
1644 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1645 current function a tree of contexts isomorphic to the one that is given.
1647 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1648 registers used in the DECL_RTL field should be remapped. If it is zero,
1649 no mapping is necessary. */
1652 integrate_decl_tree (let
, map
)
1654 struct inline_remap
*map
;
1660 new_block
= make_node (BLOCK
);
1661 VARRAY_PUSH_TREE (map
->block_map
, new_block
);
1662 next
= &BLOCK_VARS (new_block
);
1664 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
1668 d
= copy_decl_for_inlining (t
, map
->fndecl
, current_function_decl
);
1670 if (DECL_RTL_SET_P (t
))
1674 SET_DECL_RTL (d
, copy_rtx_and_substitute (DECL_RTL (t
), map
, 1));
1676 /* Fully instantiate the address with the equivalent form so that the
1677 debugging information contains the actual register, instead of the
1678 virtual register. Do this by not passing an insn to
1681 subst_constants (&r
, NULL_RTX
, map
, 1);
1682 SET_DECL_RTL (d
, r
);
1683 apply_change_group ();
1686 /* Add this declaration to the list of variables in the new
1689 next
= &TREE_CHAIN (d
);
1692 next
= &BLOCK_SUBBLOCKS (new_block
);
1693 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= BLOCK_CHAIN (t
))
1695 *next
= integrate_decl_tree (t
, map
);
1696 BLOCK_SUPERCONTEXT (*next
) = new_block
;
1697 next
= &BLOCK_CHAIN (*next
);
1700 TREE_USED (new_block
) = TREE_USED (let
);
1701 BLOCK_ABSTRACT_ORIGIN (new_block
) = let
;
1706 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1707 except for those few rtx codes that are sharable.
1709 We always return an rtx that is similar to that incoming rtx, with the
1710 exception of possibly changing a REG to a SUBREG or vice versa. No
1711 rtl is ever emitted.
1713 If FOR_LHS is nonzero, it means we are processing something that will
1714 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1715 inlining since we need to be conservative in how it is set for
1718 Handle constants that need to be placed in the constant pool by
1719 calling `force_const_mem'. */
1722 copy_rtx_and_substitute (orig
, map
, for_lhs
)
1724 struct inline_remap
*map
;
1727 register rtx copy
, temp
;
1729 register RTX_CODE code
;
1730 register enum machine_mode mode
;
1731 register const char *format_ptr
;
1737 code
= GET_CODE (orig
);
1738 mode
= GET_MODE (orig
);
1743 /* If the stack pointer register shows up, it must be part of
1744 stack-adjustments (*not* because we eliminated the frame pointer!).
1745 Small hard registers are returned as-is. Pseudo-registers
1746 go through their `reg_map'. */
1747 regno
= REGNO (orig
);
1748 if (regno
<= LAST_VIRTUAL_REGISTER
1749 || (map
->integrating
1750 && DECL_SAVED_INSNS (map
->fndecl
)->internal_arg_pointer
== orig
))
1752 /* Some hard registers are also mapped,
1753 but others are not translated. */
1754 if (map
->reg_map
[regno
] != 0)
1755 return map
->reg_map
[regno
];
1757 /* If this is the virtual frame pointer, make space in current
1758 function's stack frame for the stack frame of the inline function.
1760 Copy the address of this area into a pseudo. Map
1761 virtual_stack_vars_rtx to this pseudo and set up a constant
1762 equivalence for it to be the address. This will substitute the
1763 address into insns where it can be substituted and use the new
1764 pseudo where it can't. */
1765 else if (regno
== VIRTUAL_STACK_VARS_REGNUM
)
1768 int size
= get_func_frame_size (DECL_SAVED_INSNS (map
->fndecl
));
1769 #ifdef FRAME_GROWS_DOWNWARD
1771 = (DECL_SAVED_INSNS (map
->fndecl
)->stack_alignment_needed
1774 /* In this case, virtual_stack_vars_rtx points to one byte
1775 higher than the top of the frame area. So make sure we
1776 allocate a big enough chunk to keep the frame pointer
1777 aligned like a real one. */
1779 size
= CEIL_ROUND (size
, alignment
);
1782 loc
= assign_stack_temp (BLKmode
, size
, 1);
1783 loc
= XEXP (loc
, 0);
1784 #ifdef FRAME_GROWS_DOWNWARD
1785 /* In this case, virtual_stack_vars_rtx points to one byte
1786 higher than the top of the frame area. So compute the offset
1787 to one byte higher than our substitute frame. */
1788 loc
= plus_constant (loc
, size
);
1790 map
->reg_map
[regno
] = temp
1791 = force_reg (Pmode
, force_operand (loc
, NULL_RTX
));
1793 #ifdef STACK_BOUNDARY
1794 mark_reg_pointer (map
->reg_map
[regno
], STACK_BOUNDARY
);
1797 SET_CONST_EQUIV_DATA (map
, temp
, loc
, CONST_AGE_PARM
);
1799 seq
= gen_sequence ();
1801 emit_insn_after (seq
, map
->insns_at_start
);
1804 else if (regno
== VIRTUAL_INCOMING_ARGS_REGNUM
1805 || (map
->integrating
1806 && (DECL_SAVED_INSNS (map
->fndecl
)->internal_arg_pointer
1809 /* Do the same for a block to contain any arguments referenced
1812 int size
= DECL_SAVED_INSNS (map
->fndecl
)->args_size
;
1815 loc
= assign_stack_temp (BLKmode
, size
, 1);
1816 loc
= XEXP (loc
, 0);
1817 /* When arguments grow downward, the virtual incoming
1818 args pointer points to the top of the argument block,
1819 so the remapped location better do the same. */
1820 #ifdef ARGS_GROW_DOWNWARD
1821 loc
= plus_constant (loc
, size
);
1823 map
->reg_map
[regno
] = temp
1824 = force_reg (Pmode
, force_operand (loc
, NULL_RTX
));
1826 #ifdef STACK_BOUNDARY
1827 mark_reg_pointer (map
->reg_map
[regno
], STACK_BOUNDARY
);
1830 SET_CONST_EQUIV_DATA (map
, temp
, loc
, CONST_AGE_PARM
);
1832 seq
= gen_sequence ();
1834 emit_insn_after (seq
, map
->insns_at_start
);
          else if (REG_FUNCTION_VALUE_P (orig))
            {
              /* This is a reference to the function return value.  If
                 the function doesn't have a return value, error.  If the
                 mode doesn't agree, and it isn't BLKmode, make a SUBREG.  */
              if (map->inline_target == 0)
                {
                  if (rtx_equal_function_value_matters)
                    /* This is an ignored return value.  We must not
                       leave it in with REG_FUNCTION_VALUE_P set, since
                       that would confuse subsequent inlining of the
                       current function into a later function.  */
                    return gen_rtx_REG (GET_MODE (orig), regno);
                  else
                    /* Must be unrolling loops or replicating code if we
                       reach here, so return the register unchanged.  */
                    return orig;
                }
              else if (GET_MODE (map->inline_target) != BLKmode
                       && mode != GET_MODE (map->inline_target))
                return gen_lowpart (mode, map->inline_target);
              else
                return map->inline_target;
            }
#if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
          /* If leaf_renumber_regs_insn() might remap this register to
             some other number, make sure we don't share it with the
             inlined function, otherwise delayed optimization of the
             inlined function may change it in place, breaking our
             reference to it.  We may still share it within the
             function, so create an entry for this register in the
             leaf_reg_map.  */
          if (map->integrating && regno < FIRST_PSEUDO_REGISTER
              && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
            {
              if (!map->leaf_reg_map[regno][mode])
                map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
              return map->leaf_reg_map[regno][mode];
            }
#endif

          return orig;
        }
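      /* Each pseudo register of the inlined function gets a fresh pseudo in
         the current function the first time it is seen; reg_map records the
         mapping so later references are rewritten to the same copy, with the
         user-variable, loop-test and unchanging flags carried over.  */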
      if (map->reg_map[regno] == NULL)
        {
          map->reg_map[regno] = gen_reg_rtx (mode);
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (REG_POINTER (map->x_regno_reg_rtx[regno]))
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
        }
      return map->reg_map[regno];
    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
        return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
                               SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
        {
          rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);

          if (GET_MODE (retval) == GET_MODE (orig))
            return retval;
          else
            return gen_rtx_SUBREG (GET_MODE (orig), retval,
                                   (SUBREG_WORD (orig) %
                                    (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
                                     / (unsigned) UNITS_PER_WORD)));
        }
      else
        return gen_rtx_SUBREG (GET_MODE (orig), copy,
                               SUBREG_WORD (orig));
    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
                                copy_rtx_and_substitute (XEXP (orig, 0),
                                                         map, for_lhs),
                                0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
        regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
        {
          temp = XEXP (orig, 0);
          map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (REG_POINTER (map->x_regno_reg_rtx[regno]))
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
          regno = REGNO (map->reg_map[regno]);
        }
      ADDRESSOF_REGNO (copy) = regno;
      return copy;
    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
         to (use foo) if the original insn didn't have a subreg.
         Removing the subreg distorts the VAX movstrhi pattern
         by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
        copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
        = LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

      /* We need to handle "deleted" labels that appear in the DECL_RTL
         of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
        return map->insn_map[INSN_UID (orig)];
      break;
    case LABEL_REF:
      copy
        = gen_rtx_LABEL_REF
          (mode,
           LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
           : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
         it still is, so we must check if it is within the range of
         this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
        = (LABEL_REF_NONLOCAL_P (orig)
           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
         inlined call will be referring to our nonlocal goto handler.
         So make sure we create one for this block; we normally would
         not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
        function_call_count++;

      return copy;
    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
         pool must be modified to point to a constant pool entry for the
         remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
        {
          struct function *f = inlining ? inlining : cfun;
          rtx constant = get_pool_constant_for_function (f, orig);
          enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
          if (inlining)
            {
              rtx temp = force_const_mem (const_mode,
                                          copy_rtx_and_substitute (constant,
                                                                   map, 0));

#if 0
              /* Legitimizing the address here is incorrect.

                 Since we had a SYMBOL_REF before, we can assume it is valid
                 to have one in this position in the insn.

                 Also, change_address may create new registers.  These
                 registers will not have valid reg_map entries.  This can
                 cause try_constants () to fail because it assumes that all
                 registers in the rtx have valid reg_map entries, and it may
                 end up replacing one of these new registers with junk.  */

              if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
                temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

              temp = XEXP (temp, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (temp) != GET_MODE (orig))
                temp = convert_memory_address (GET_MODE (orig), temp);
#endif
              return temp;
            }
          else if (GET_CODE (constant) == LABEL_REF)
            return XEXP (force_const_mem
                         (GET_MODE (orig),
                          copy_rtx_and_substitute (constant, map, for_lhs)),
                         0);
        }

      return orig;
    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't want
         to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
         duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);
    case CONST:
      /* Make new constant pool entry for a constant
         that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
        abort ();
      break;
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
         it contains multiple ASM_OPERANDS rtx's that share the input
         and constraint vecs.  We must make sure that the copied insn
         continues to share them.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          copy->volatil = orig->volatil;
          PUT_MODE (copy, GET_MODE (orig));
          ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
          ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
            = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
          ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
          ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
          ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
            = map->copy_asm_constraints_vector;
          ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
          ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
          return copy;
        }
      break;
    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        return
          gen_rtx_CALL
            (GET_MODE (orig),
             gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
                          copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
                                                   map, 0)),
             copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
      break;
#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif
    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
         Adjust the setting by the offset of the area we made.
         If the nonlocal goto is into the current function,
         this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
          || SET_DEST (orig) == virtual_incoming_args_rtx)
        {
          /* In case a translation hasn't occurred already, make one now.  */
          rtx equiv_reg;
          rtx equiv_loc;
          HOST_WIDE_INT loc_offset;

          copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
          equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
          equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                          REGNO (equiv_reg)).rtx;
          loc_offset
            = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

          return gen_rtx_SET (VOIDmode, SET_DEST (orig),
                              force_operand
                              (plus_constant
                               (copy_rtx_and_substitute (SET_SRC (orig),
                                                         map, 0),
                                - loc_offset),
                               NULL_RTX));
        }
      else
        return gen_rtx_SET (VOIDmode,
                            copy_rtx_and_substitute (SET_DEST (orig), map, 1),
                            copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;
    case MEM:
      if (inlining
          && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
        {
          enum machine_mode const_mode
            = get_pool_mode_for_function (inlining, XEXP (orig, 0));
          rtx constant
            = get_pool_constant_for_function (inlining, XEXP (orig, 0));

          constant = copy_rtx_and_substitute (constant, map, 0);

          /* If this was an address of a constant pool entry that itself
             had to be placed in the constant pool, it might not be a
             valid address.  So the recursive call might have turned it
             into a register.  In that case, it isn't a constant any
             more, so return it.  This has the potential of changing a
             MEM into a REG, but we'll assume that it is safe.  */
          if (! CONSTANT_P (constant))
            return constant;

          return validize_mem (force_const_mem (const_mode, constant));
        }

      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          /* Copy this through the wide int field; that's safest.  */
          X0WINT (copy, i) = X0WINT (orig, i);
          break;

        case 'e':
          XEXP (copy, i)
            = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
                                             map, for_lhs);
            }
          break;

        case 'w':
          XWINT (copy, i) = XWINT (orig, i);
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        case 't':
          XTREE (copy, i) = XTREE (orig, i);
          break;

        default:
          abort ();
        }
    }
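  /* The first ASM_OPERANDS copied out of an asm with multiple outputs records
     its input and constraint vectors below, so that the remaining
     ASM_OPERANDS of the same insn (handled near the top of this function)
     can share the copies instead of each getting vectors of their own.  */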
  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
        = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
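  /* The first pass below runs with MEMONLY nonzero, so it only touches
     addresses inside MEMs; for a store of a constant this lets the address
     be updated even on machines that reject a constant source operand.
     The second pass then attempts the remaining substitutions.  */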
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif
  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
        {
          int regno = REGNO (map->equiv_sets[i].dest);

          MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
          if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
              /* Following clause is a hack to make the case work where GNU C++
                 reassigns a variable to make cse work right.  */
              || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                                    regno).rtx,
                                map->equiv_sets[i].equiv))
            SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
                                  map->equiv_sets[i].equiv, map->const_age);
        }
      else if (map->equiv_sets[i].dest == pc_rtx)
        map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
        map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the address of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */
static void
subst_constants (loc, insn, map, memonly)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
     int memonly;
{
  rtx x = *loc;
  register int i, j;
  register enum rtx_code code;
  register const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;
      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */
    case REG:
      {
        int regno = REGNO (x);
        struct const_equiv_data *p;

        if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
            && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
            && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
                p->rtx != 0)
            && p->age >= map->const_age)
          validate_change (insn, loc, p->rtx, 1);
        return;
      }
    case SUBREG:
      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
        {
          rtx inner = SUBREG_REG (x);

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
             see what is inside, try to form the new SUBREG and see if that is
             valid.  We handle two cases: extracting a full word in an
             integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map, 0);

          if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
              && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
              && GET_MODE (SUBREG_REG (x)) != VOIDmode)
            new = operand_subword (inner, SUBREG_WORD (x), 0,
                                   GET_MODE (SUBREG_REG (x)));

          cancel_changes (num_changes);
          if (new == 0 && subreg_lowpart_p (x))
            new = gen_lowpart_common (GET_MODE (x), inner);

          if (new)
            validate_change (insn, loc, new, 1);

          return;
        }
      break;
    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);
      return;
    case SET:
      {
        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the destination
           itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;
        rtx src, tem;
        enum machine_mode compare_mode = VOIDmode;

        /* If SET_SRC is a COMPARE which subst_constants would turn into
           COMPARE of 2 VOIDmode constants, note the mode in which comparison
           is done.  */
        if (GET_CODE (SET_SRC (x)) == COMPARE)
          {
            src = SET_SRC (x);
            if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
#ifdef HAVE_cc0
                || dest == cc0_rtx
#endif
                )
              {
                compare_mode = GET_MODE (XEXP (src, 0));
                if (compare_mode == VOIDmode)
                  compare_mode = GET_MODE (XEXP (src, 1));
              }
          }

        subst_constants (&SET_SRC (x), insn, map, memonly);
        src = SET_SRC (x);
        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
          {
            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
              {
                subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
                subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
              }
            dest_loc = &XEXP (*dest_loc, 0);
          }

        /* Do substitute in the address of a destination in memory.  */
        if (GET_CODE (*dest_loc) == MEM)
          subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
        /* Check for the case of DEST a SUBREG, both it and the underlying
           register are less than one word, and the SUBREG has the wider mode.
           In that case, we are really setting the underlying register to the
           source converted to the mode of DEST.  So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);
        /* If storing a recognizable value save it for later recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (GET_CODE (src) == REG
                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
                || (GET_CODE (src) == PLUS
                    && GET_CODE (XEXP (src, 0)) == REG
                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
                || dest == cc0_rtx
#endif
                || (dest == pc_rtx
                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))
          {
            /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
               it will cause us to save the COMPARE with any constants
               substituted, which is what we want for later.  */
            rtx src_copy = copy_rtx (src);
            map->equiv_sets[map->num_sets].equiv = src_copy;
            map->equiv_sets[map->num_sets++].dest = dest;
            if (compare_mode != VOIDmode
                && GET_CODE (src) == COMPARE
                && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
#ifdef HAVE_cc0
                    || dest == cc0_rtx
#endif
                    )
                && GET_MODE (XEXP (src, 0)) == VOIDmode
                && GET_MODE (XEXP (src, 1)) == VOIDmode)
              {
                map->compare_src = src_copy;
                map->compare_mode = compare_mode;
              }
          }
      }
      return;

    default:
      break;
    }
  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          subst_constants (&XEXP (x, i), insn, map, memonly);
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            for (j = 0; j < XVECLEN (x, i); j++)
              subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
          break;
        }
    }
  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }
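  /* E.g. (plus (const_int 4) (reg 117)) is queued to become
     (plus (reg 117) (const_int 4)), the canonical operand order
     the rest of the compiler expects.  */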
  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      if (op0_mode == MAX_MACHINE_MODE)
        abort ();
      new = simplify_unary_operation (code, GET_MODE (x),
                                      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
        enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

        if (op_mode == VOIDmode)
          op_mode = GET_MODE (XEXP (x, 1));
        new = simplify_relational_operation (code, op_mode,
                                             XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
        if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
          {
            enum machine_mode mode = GET_MODE (x);
            if (new == const0_rtx)
              new = CONST0_RTX (mode);
            else
              {
                REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
                new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
              }
          }
#endif
        break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
                                       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      if (op0_mode == MAX_MACHINE_MODE)
        abort ();

      if (code == IF_THEN_ELSE)
        {
          rtx op0 = XEXP (x, 0);

          if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
              && GET_MODE (op0) == VOIDmode
              && ! side_effects_p (op0)
              && XEXP (op0, 0) == map->compare_src
              && GET_MODE (XEXP (op0, 1)) == VOIDmode)
            {
              /* We have a compare of two VOIDmode constants for which
                 we recorded the comparison mode.  */
              rtx temp =
                simplify_relational_operation (GET_CODE (op0),
                                               map->compare_mode,
                                               XEXP (op0, 0),
                                               XEXP (op0, 1));

              if (temp == const0_rtx)
                new = XEXP (x, 2);
              else if (temp == const1_rtx)
                new = XEXP (x, 1);
            }
        }
      if (!new)
        new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                          XEXP (x, 0), XEXP (x, 1),
                                          XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

static void
mark_stores (dest, x, data)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;
  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
                               : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
      unsigned int i;
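      /* A store to a multi-word hard register invalidates every word it
         covers; a DImode store to hard register 3, say, typically spans
         HARD_REGNO_NREGS == 2 registers on a 32-bit target, so both 3 and 4
         lose their recorded equivalences below.  */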
      /* Ignore virtual stack var or virtual arg register since those
         are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
          && uregno != VIRTUAL_STACK_VARS_REGNUM)
        for (i = uregno; i <= last_reg; i++)
          if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
            VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree of which
   it is the root, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */
static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
        register tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */
void
set_decl_origin_self (decl)
     tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          register tree arg;

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE
              && DECL_INITIAL (decl) != error_mark_node)
            set_block_origin_self (DECL_INITIAL (decl));
        }
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}
/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  struct function *old_cfun = cfun;
  enum debug_info_type old_write_symbols = write_symbols;
  struct function *f = DECL_SAVED_INSNS (fndecl);

  cfun = f;
  current_function_decl = fndecl;
  clear_emit_caches ();

  set_new_last_label_num (f->inl_max_label_num);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* If requested, suppress debugging information.  */
  if (f->no_debugging_symbols)
    write_symbols = NO_DEBUG;

  /* Do any preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  note_outlining_of_inline_function (fndecl);

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  /* We can't inline this anymore.  */
  f->inlinable = 0;
  DECL_INLINE (fndecl) = 0;

  cfun = old_cfun;
  current_function_decl = old_cfun ? old_cfun->decl : 0;
  write_symbols = old_write_symbols;
}