/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-98, 1999, 2000 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;
/* Round VALUE up to the next integer that meets the alignment ALIGN.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
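
/* For example, CEIL_ROUND (13, 8) is (13 + 7) & ~7 == 16, and
   CEIL_ROUND (16, 8) == 16.  Note that the bit mask is only correct
   when ALIGN is a power of two.  */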

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
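
/* Illustrative numbers: for a function with two arguments, the default
   threshold is 1 + (3 * 2) / 2 = 4 insns when optimizing for size, and
   8 * (8 + 2) = 80 insns otherwise.  */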

static rtvec initialize_for_inline PARAMS ((tree));
static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
					  rtvec));
static tree integrate_decl_tree PARAMS ((tree,
					 struct inline_remap *));
static void subst_constants PARAMS ((rtx *, rtx,
				     struct inline_remap *, int));
static void set_block_origin_self PARAMS ((tree));
static void set_decl_origin_self PARAMS ((tree));
static void set_block_abstract_flags PARAMS ((tree, int));
static void process_reg_param PARAMS ((struct inline_remap *, rtx,
				       rtx));
void set_decl_abstract_flags PARAMS ((tree, int));
static rtx expand_inline_function_eh_labelmap PARAMS ((rtx));
static void mark_stores PARAMS ((rtx, rtx, void *));
static int compare_blocks PARAMS ((const PTR, const PTR));
static int find_block PARAMS ((const PTR, const PTR));

/* The maximum number of instructions accepted for inlining a
   function.  Increasing values mean more aggressive inlining.
   Currently this affects only functions explicitly marked as
   inline (or methods defined within the class definition for C++).
   The default value of 10000 is arbitrary but high to match the
   previously unlimited gcc capabilities.  */

int inline_max_insns = 10000;
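
/* This is the limit selected with -finline-limit-<n> (see
   function_cannot_inline_p below); e.g. -finline-limit-600 should allow
   explicitly inline functions of up to roughly 600 insns, plus 8 insns
   per argument.  */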

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the struct function for the inlined function.  */
static struct function *inlining = 0;

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, set it to a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
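
/* For example, expand_inline_function below only zeroes the slice of
   label_map between min_labelno and max_labelno; the first call of
   get_label_from_map for a given index I then allocates its label
   lazily, and every later call with I returns that same rtx.  */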

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     inline_max_insns (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (inline_max_insns
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot be inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot be inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	return N_("function with complex parameters cannot be inline");
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return N_("inline functions not supported for this return value type");

  return 0;
}

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline_nocopy'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    /* For a parameter, we must make an equivalent VAR_DECL, not a
       new PARM_DECL.  */
    copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  else
    {
      copy = copy_node (decl);
      if (DECL_LANG_SPECIFIC (copy))
	copy_lang_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  DECL_RTL (copy) = NULL_RTX;

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	/* Record what interesting things happen to our parameters.  */
	note_stores (PATTERN (insn), note_modified_parmregs, NULL);
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  cfun->inl_max_label_num = max_label_num ();
  cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
  cfun->original_arg_vector = argvec;
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  free (parmdecl_map);
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
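
/* For example, FIXED_BASE_PLUS_P matches an address such as
   (plus (reg VIRTUAL_STACK_VARS_REGNUM) (const_int 8)): a constant
   offset from one of the virtual base registers.  */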

/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}

/* Used by duplicate_eh_handlers to map labels for the exception table.  */
static struct inline_remap *eif_eh_map;

static rtx
expand_inline_function_eh_labelmap (label)
     rtx label;
{
  int index = CODE_LABEL_NUMBER (label);
  return get_label_from_map (eif_eh_map, index);
}

/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);

  return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
	  - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);

  return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
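
/* Both comparators order BLOCKs by the raw address of their
   BLOCK_ABSTRACT_ORIGIN, so a block_map sorted with compare_blocks
   (see the qsort call in expand_inline_function below) can then be
   probed with find_block via bsearch.  */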

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_SET_IN_STRUCT_P (stack_slot,
			       AGGREGATE_TYPE_P (TREE_TYPE (arg)));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
  map->regno_pointer_align = inl_f->emit->regno_pointer_align;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_FRAME_SIZE (fndecl) != 0)
	    copy_rtx_and_substitute (virtual_stack_vars_rtx, map, 0);

	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else
    abort ();

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  bzero ((char *) &map->label_map[min_labelno],
	 (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* If the inline fn needs eh context, make sure that
	     the current fn has one.  */
	  if (GET_CODE (pattern) == USE
	      && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
	    get_eh_context ();

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn but whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */
	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.
	     Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
	     deleted these in the copy used for continuing compilation,
	     not the copy used for inlining).  */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
		{
		  rtx label
		    = get_label_from_map (map, NOTE_EH_HANDLER (copy));

		  /* We have to duplicate the handlers for the original.  */
		  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		    {
		      /* We need to duplicate the handlers for the EH region
			 and we need to indicate where the label map is.  */
		      eif_eh_map = map;
		      duplicate_eh_handlers (NOTE_EH_HANDLER (copy),
					     CODE_LABEL_NUMBER (label),
					     expand_inline_function_eh_labelmap);
		    }

		  /* We have to forward these both to match the new exception
		     region.  */
		  NOTE_EH_HANDLER (copy) = CODE_LABEL_NUMBER (label);
		}
	      else if (copy
		       && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
			   || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
		       && NOTE_BLOCK (insn))
		{
		  tree *mapped_block_p;

		  mapped_block_p
		    = (tree *) bsearch (NOTE_BLOCK (insn),
					&VARRAY_TREE (map->block_map, 0),
					map->block_map->elements_used,
					sizeof (tree),
					find_block);

		  if (!mapped_block_p)
		    abort ();
		  else
		    NOTE_BLOCK (copy) = *mapped_block_p;
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }

  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&tem, NULL_RTX, map, 0);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }

  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode
       because the superblocks have not been created yet.  */
    insert_block (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      MEM_SET_IN_STRUCT_P (target, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  VARRAY_FREE (map->block_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree decl = copy_decl_for_inlining (tail, map->fndecl,
					  current_function_decl);
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}

/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static tree
integrate_decl_tree (let, map)
     tree let;
     struct inline_remap *map;
{
  tree t;
  tree new_block;
  tree *next;

  new_block = make_node (BLOCK);
  VARRAY_PUSH_TREE (map->block_map, new_block);
  next = &BLOCK_VARS (new_block);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map, 1);

	  /* Fully instantiate the address with the equivalent form so that the
	     debugging information contains the actual register, instead of the
	     virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map, 1);
	  apply_change_group ();
	}

      /* Add this declaration to the list of variables in the new
	 block.  */
      *next = d;
      next = &TREE_CHAIN (d);
    }

  next = &BLOCK_SUBBLOCKS (new_block);
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    {
      *next = integrate_decl_tree (t, map);
      BLOCK_SUPERCONTEXT (*next) = new_block;
      next = &BLOCK_CHAIN (*next);
    }

  TREE_USED (new_block) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (new_block) = let;

  return new_block;
}

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (orig, map, for_lhs)
     register rtx orig;
     struct inline_remap *map;
     int for_lhs;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
1598 case REG:
1599 /* If the stack pointer register shows up, it must be part of
1600 stack-adjustments (*not* because we eliminated the frame pointer!).
1601 Small hard registers are returned as-is. Pseudo-registers
1602 go through their `reg_map'. */
1603 regno = REGNO (orig);
1604 if (regno <= LAST_VIRTUAL_REGISTER
1605 || (map->integrating
1606 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1608 /* Some hard registers are also mapped,
1609 but others are not translated. */
1610 if (map->reg_map[regno] != 0)
1611 return map->reg_map[regno];
1613 /* If this is the virtual frame pointer, make space in current
1614 function's stack frame for the stack frame of the inline function.
1616 Copy the address of this area into a pseudo. Map
1617 virtual_stack_vars_rtx to this pseudo and set up a constant
1618 equivalence for it to be the address. This will substitute the
1619 address into insns where it can be substituted and use the new
1620 pseudo where it can't. */
1621 if (regno == VIRTUAL_STACK_VARS_REGNUM)
1623 rtx loc, seq;
1624 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1625 #ifdef FRAME_GROWS_DOWNWARD
1626 int alignment
1627 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1628 / BITS_PER_UNIT);
1630 /* In this case, virtual_stack_vars_rtx points to one byte
1631 higher than the top of the frame area. So make sure we
1632 allocate a big enough chunk to keep the frame pointer
1633 aligned like a real one. */
1634 if (alignment)
1635 size = CEIL_ROUND (size, alignment);
1636 #endif
1637 start_sequence ();
1638 loc = assign_stack_temp (BLKmode, size, 1);
1639 loc = XEXP (loc, 0);
1640 #ifdef FRAME_GROWS_DOWNWARD
1641 /* In this case, virtual_stack_vars_rtx points to one byte
1642 higher than the top of the frame area. So compute the offset
1643 to one byte higher than our substitute frame. */
1644 loc = plus_constant (loc, size);
1645 #endif
1646 map->reg_map[regno] = temp
1647 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1649 #ifdef STACK_BOUNDARY
1650 mark_reg_pointer (map->reg_map[regno],
1651 STACK_BOUNDARY / BITS_PER_UNIT);
1652 #endif
1654 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1656 seq = gen_sequence ();
1657 end_sequence ();
1658 emit_insn_after (seq, map->insns_at_start);
1659 return temp;
1661 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1662 || (map->integrating
1663 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1664 == orig)))
1666 /* Do the same for a block to contain any arguments referenced
1667 in memory. */
1668 rtx loc, seq;
1669 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1671 start_sequence ();
1672 loc = assign_stack_temp (BLKmode, size, 1);
1673 loc = XEXP (loc, 0);
1674 /* When arguments grow downward, the virtual incoming
1675 args pointer points to the top of the argument block,
1676 so the remapped location better do the same. */
1677 #ifdef ARGS_GROW_DOWNWARD
1678 loc = plus_constant (loc, size);
1679 #endif
1680 map->reg_map[regno] = temp
1681 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1683 #ifdef STACK_BOUNDARY
1684 mark_reg_pointer (map->reg_map[regno],
1685 STACK_BOUNDARY / BITS_PER_UNIT);
1686 #endif
1688 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1690 seq = gen_sequence ();
1691 end_sequence ();
1692 emit_insn_after (seq, map->insns_at_start);
1693 return temp;
1695 else if (REG_FUNCTION_VALUE_P (orig))
1697 /* This is a reference to the function return value. If
1698 the function doesn't have a return value, error. If the
1699 mode doesn't agree and it isn't BLKmode, make a SUBREG. */
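/* For example (made-up register number): if the call was expanded
with map->inline_target == (reg:DI 90) but ORIG has SImode,
gen_lowpart hands back something like (subreg:SI (reg:DI 90) 0)
on a little-endian target.  */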
1700 if (map->inline_target == 0)
1701 /* Must be unrolling loops or replicating code if we
1702 reach here, so return the register unchanged. */
1703 return orig;
1704 else if (GET_MODE (map->inline_target) != BLKmode
1705 && mode != GET_MODE (map->inline_target))
1706 return gen_lowpart (mode, map->inline_target);
1707 else
1708 return map->inline_target;
1710 return orig;
1712 if (map->reg_map[regno] == NULL)
1714 map->reg_map[regno] = gen_reg_rtx (mode);
1715 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1716 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1717 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1718 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1720 if (map->regno_pointer_flag[regno])
1721 mark_reg_pointer (map->reg_map[regno],
1722 map->regno_pointer_align[regno]);
1724 return map->reg_map[regno];
1726 case SUBREG:
1727 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1728 /* SUBREG is ordinary, but don't make nested SUBREGs. */
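/* Sketch with made-up numbers: if ORIG is (subreg:SI (reg:DI 70) 1)
and reg 70 was remapped to (subreg:DI (reg:TI 90) 1), the result
is (subreg:SI (reg:TI 90) 2); the word numbers simply add.  */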
1729 if (GET_CODE (copy) == SUBREG)
1730 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
1731 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1732 else if (GET_CODE (copy) == CONCAT)
1734 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
1736 if (GET_MODE (retval) == GET_MODE (orig))
1737 return retval;
1738 else
1739 return gen_rtx_SUBREG (GET_MODE (orig), retval,
1740 (SUBREG_WORD (orig) %
1741 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
1742 / (unsigned) UNITS_PER_WORD)));
1744 else
1745 return gen_rtx_SUBREG (GET_MODE (orig), copy,
1746 SUBREG_WORD (orig));
1748 case ADDRESSOF:
1749 copy = gen_rtx_ADDRESSOF (mode,
1750 copy_rtx_and_substitute (XEXP (orig, 0),
1751 map, for_lhs),
1752 0, ADDRESSOF_DECL (orig));
1753 regno = ADDRESSOF_REGNO (orig);
1754 if (map->reg_map[regno])
1755 regno = REGNO (map->reg_map[regno]);
1756 else if (regno > LAST_VIRTUAL_REGISTER)
1758 temp = XEXP (orig, 0);
1759 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
1760 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
1761 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
1762 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
1763 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1765 if (map->regno_pointer_flag[regno])
1766 mark_reg_pointer (map->reg_map[regno],
1767 map->regno_pointer_align[regno]);
1768 regno = REGNO (map->reg_map[regno]);
1770 ADDRESSOF_REGNO (copy) = regno;
1771 return copy;
1773 case USE:
1774 case CLOBBER:
1775 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1776 to (use foo) if the original insn didn't have a subreg.
1777 Removing the subreg distorts the VAX movstrhi pattern
1778 by changing the mode of an operand. */
1779 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
1780 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1781 copy = SUBREG_REG (copy);
1782 return gen_rtx_fmt_e (code, VOIDmode, copy);
1784 case CODE_LABEL:
1785 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1786 = LABEL_PRESERVE_P (orig);
1787 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
1789 case LABEL_REF:
1790 copy
1791 = gen_rtx_LABEL_REF
1792 (mode,
1793 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1794 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
1796 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1798 /* The fact that this label was previously nonlocal does not mean
1799 it still is, so we must check if it is within the range of
1800 this function's labels. */
1801 LABEL_REF_NONLOCAL_P (copy)
1802 = (LABEL_REF_NONLOCAL_P (orig)
1803 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
1804 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
1806 /* If we have made a nonlocal label local, it means that this
1807 inlined call will be referring to our nonlocal goto handler.
1808 So make sure we create one for this block; we normally would
1809 not since this is not otherwise considered a "call". */
1810 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
1811 function_call_count++;
1813 return copy;
1815 case PC:
1816 case CC0:
1817 case CONST_INT:
1818 return orig;
1820 case SYMBOL_REF:
1821 /* Symbols which represent the address of a label stored in the constant
1822 pool must be modified to point to a constant pool entry for the
1823 remapped label. Otherwise, symbols are returned unchanged. */
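/* E.g. (the symbol name here is illustrative only), a pool symbol
such as (symbol_ref/u "*.LC5") that holds the address of a
CODE_LABEL must be re-forced into the current function's pool so
that it refers to the copied label, not the original one.  */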
1824 if (CONSTANT_POOL_ADDRESS_P (orig))
1826 struct function *f = inlining ? inlining : cfun;
1827 rtx constant = get_pool_constant_for_function (f, orig);
1828 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
1829 if (inlining)
1831 rtx temp = force_const_mem (const_mode,
1832 copy_rtx_and_substitute (constant,
1833 map, 0));
1835 #if 0
1836 /* Legitimizing the address here is incorrect.
1838 Since we had a SYMBOL_REF before, we can assume it is valid
1839 to have one in this position in the insn.
1841 Also, change_address may create new registers. These
1842 registers will not have valid reg_map entries. This can
1843 cause try_constants () to fail because it assumes that all
1844 registers in the rtx have valid reg_map entries, and it may
1845 end up replacing one of these new registers with junk. */
1847 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1848 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
1849 #endif
1851 temp = XEXP (temp, 0);
1853 #ifdef POINTERS_EXTEND_UNSIGNED
1854 if (GET_MODE (temp) != GET_MODE (orig))
1855 temp = convert_memory_address (GET_MODE (orig), temp);
1856 #endif
1857 return temp;
1859 else if (GET_CODE (constant) == LABEL_REF)
1860 return XEXP (force_const_mem
1861 (GET_MODE (orig),
1862 copy_rtx_and_substitute (constant, map, for_lhs)),
1863 0);
1865 else
1866 if (SYMBOL_REF_NEED_ADJUST (orig))
1868 eif_eh_map = map;
1869 return rethrow_symbol_map (orig,
1870 expand_inline_function_eh_labelmap);
1873 return orig;
1875 case CONST_DOUBLE:
1876 /* We have to make a new copy of this CONST_DOUBLE because we don't want
1877 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1878 duplicate of a CONST_DOUBLE we have already seen. */
1879 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
1881 REAL_VALUE_TYPE d;
1883 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
1884 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
1886 else
1887 return immed_double_const (CONST_DOUBLE_LOW (orig),
1888 CONST_DOUBLE_HIGH (orig), VOIDmode);
1890 case CONST:
1891 /* Make new constant pool entry for a constant
1892 that was in the pool of the inline function. */
1893 if (RTX_INTEGRATED_P (orig))
1894 abort ();
1895 break;
1897 case ASM_OPERANDS:
1898 /* If a single asm insn contains multiple output operands
1899 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1900 We must make sure that the copied insn continues to share it. */
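/* Concretely (an illustrative source fragment, not from this file):
asm ("foo %0,%1,%2" : "=r" (hi), "=r" (lo) : "r" (x));
expands to a PARALLEL of SETs whose sources are ASM_OPERANDS rtx's,
all sharing one operand vector in XVEC (..., 3); the copies made
here must likewise share a single copied vector.  */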
1901 if (map->orig_asm_operands_vector == XVEC (orig, 3))
1903 copy = rtx_alloc (ASM_OPERANDS);
1904 copy->volatil = orig->volatil;
1905 XSTR (copy, 0) = XSTR (orig, 0);
1906 XSTR (copy, 1) = XSTR (orig, 1);
1907 XINT (copy, 2) = XINT (orig, 2);
1908 XVEC (copy, 3) = map->copy_asm_operands_vector;
1909 XVEC (copy, 4) = map->copy_asm_constraints_vector;
1910 XSTR (copy, 5) = XSTR (orig, 5);
1911 XINT (copy, 6) = XINT (orig, 6);
1912 return copy;
1914 break;
1916 case CALL:
1917 /* This is given special treatment because the first
1918 operand of a CALL is a (MEM ...) which may get
1919 forced into a register for cse. This is undesirable
1920 if function-address cse isn't wanted or if we won't do cse. */
1921 #ifndef NO_FUNCTION_CSE
1922 if (! (optimize && ! flag_no_function_cse))
1923 #endif
1924 return
1925 gen_rtx_CALL
1926 (GET_MODE (orig),
1927 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
1928 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
1929 map, 0)),
1930 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
1931 break;
1933 #if 0
1934 /* Must be ifdefed out for loop unrolling to work. */
1935 case RETURN:
1936 abort ();
1937 #endif
1939 case SET:
1940 /* If this is setting fp or ap, it means that we have a nonlocal goto.
1941 Adjust the setting by the offset of the area we made.
1942 If the nonlocal goto is into the current function,
1943 this will result in unnecessarily bad code, but should work. */
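/* Rough sketch (made-up numbers): if the substitute frame block's
address is (plus:SI (reg:SI 65) (const_int -32)), then loc_offset
below is -32 and the copied SET stores the substituted SET_SRC
plus 32, i.e. minus loc_offset.  */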
1944 if (SET_DEST (orig) == virtual_stack_vars_rtx
1945 || SET_DEST (orig) == virtual_incoming_args_rtx)
1947 /* In case a translation hasn't occurred already, make one now. */
1948 rtx equiv_reg;
1949 rtx equiv_loc;
1950 HOST_WIDE_INT loc_offset;
1952 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
1953 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
1954 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1955 REGNO (equiv_reg)).rtx;
1956 loc_offset
1957 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
1959 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
1960 force_operand
1961 (plus_constant
1962 (copy_rtx_and_substitute (SET_SRC (orig),
1963 map, 0),
1964 - loc_offset),
1965 NULL_RTX));
1967 else
1968 return gen_rtx_SET (VOIDmode,
1969 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
1970 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
1971 break;
1973 case MEM:
1974 if (inlining
1975 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
1976 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
1978 enum machine_mode const_mode
1979 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
1980 rtx constant
1981 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
1983 constant = copy_rtx_and_substitute (constant, map, 0);
1985 /* If this was an address of a constant pool entry that itself
1986 had to be placed in the constant pool, it might not be a
1987 valid address. So the recursive call might have turned it
1988 into a register. In that case, it isn't a constant any
1989 more, so return it. This has the potential of changing a
1990 MEM into a REG, but we'll assume that it is safe. */
1991 if (! CONSTANT_P (constant))
1992 return constant;
1994 return validize_mem (force_const_mem (const_mode, constant));
1997 copy = rtx_alloc (MEM);
1998 PUT_MODE (copy, mode);
1999 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
2000 MEM_COPY_ATTRIBUTES (copy, orig);
2001 MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
2002 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2003 return copy;
2005 default:
2006 break;
2009 copy = rtx_alloc (code);
2010 PUT_MODE (copy, mode);
2011 copy->in_struct = orig->in_struct;
2012 copy->volatil = orig->volatil;
2013 copy->unchanging = orig->unchanging;
2015 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2017 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2019 switch (*format_ptr++)
2021 case '0':
2022 /* Copy this through the wide int field; that's safest. */
2023 X0WINT (copy, i) = X0WINT (orig, i);
2024 break;
2026 case 'e':
2027 XEXP (copy, i)
2028 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2029 break;
2031 case 'u':
2032 /* Change any references to old-insns to point to the
2033 corresponding copied insns. */
2034 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2035 break;
2037 case 'E':
2038 XVEC (copy, i) = XVEC (orig, i);
2039 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2041 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2042 for (j = 0; j < XVECLEN (copy, i); j++)
2043 XVECEXP (copy, i, j)
2044 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2045 map, for_lhs);
2047 break;
2049 case 'w':
2050 XWINT (copy, i) = XWINT (orig, i);
2051 break;
2053 case 'i':
2054 XINT (copy, i) = XINT (orig, i);
2055 break;
2057 case 's':
2058 XSTR (copy, i) = XSTR (orig, i);
2059 break;
2061 case 't':
2062 XTREE (copy, i) = XTREE (orig, i);
2063 break;
2065 default:
2066 abort ();
2070 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2072 map->orig_asm_operands_vector = XVEC (orig, 3);
2073 map->copy_asm_operands_vector = XVEC (copy, 3);
2074 map->copy_asm_constraints_vector = XVEC (copy, 4);
2077 return copy;
2080 /* Substitute known constant values into INSN, if that is valid. */
2082 void
2083 try_constants (insn, map)
2084 rtx insn;
2085 struct inline_remap *map;
2087 int i;
2089 map->num_sets = 0;
2091 /* First try just updating addresses, then other things. This is
2092 important when we have something like the store of a constant
2093 into memory and we can update the memory address but the machine
2094 does not support a constant source. */
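/* Illustrative case (made-up registers): in a copied insn
(set (mem:SI (reg:SI 95)) (reg:SI 96)) where reg 95 is equivalent
to a frame address and reg 96 to (const_int 0), the first pass can
fold the address into the MEM even if the machine's store pattern
would reject (const_int 0) as a source operand.  */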
2095 subst_constants (&PATTERN (insn), insn, map, 1);
2096 apply_change_group ();
2097 subst_constants (&PATTERN (insn), insn, map, 0);
2098 apply_change_group ();
2100 /* Show we don't know the value of anything stored or clobbered. */
2101 note_stores (PATTERN (insn), mark_stores, NULL);
2102 map->last_pc_value = 0;
2103 #ifdef HAVE_cc0
2104 map->last_cc0_value = 0;
2105 #endif
2107 /* Set up any constant equivalences made in this insn. */
2108 for (i = 0; i < map->num_sets; i++)
2110 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2112 int regno = REGNO (map->equiv_sets[i].dest);
2114 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2115 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2116 /* The following clause is a hack to make the case work where
2117 GNU C++ reassigns a variable to make cse work right. */
2118 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2119 regno).rtx,
2120 map->equiv_sets[i].equiv))
2121 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2122 map->equiv_sets[i].equiv, map->const_age);
2124 else if (map->equiv_sets[i].dest == pc_rtx)
2125 map->last_pc_value = map->equiv_sets[i].equiv;
2126 #ifdef HAVE_cc0
2127 else if (map->equiv_sets[i].dest == cc0_rtx)
2128 map->last_cc0_value = map->equiv_sets[i].equiv;
2129 #endif
2133 /* Substitute known constants for pseudo regs in the contents of LOC,
2134 which are part of INSN.
2135 If INSN is zero, the substitution should always be done (this is used to
2136 update DECL_RTL).
2137 These changes are taken out by try_constants if the result is not valid.
2139 Note that we are more concerned with determining when the result of a SET
2140 is a constant, for further propagation, than actually inserting constants
2141 into insns; cse will do the latter task better.
2143 This function is also used to adjust the addresses of items previously addressed
2144 via the virtual stack variable or virtual incoming arguments registers.
2146 If MEMONLY is nonzero, only make changes inside a MEM. */
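/* A typical address adjustment (made-up numbers): a DECL_RTL such as
(mem:SI (plus:SI (reg:SI 95) (const_int -4))), where reg 95 is the
pseudo standing in for virtual-stack-vars with constant equivalence
(plus:SI (reg:SI 65) (const_int -32)), simplifies to
(mem:SI (plus:SI (reg:SI 65) (const_int -36))).  */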
2148 static void
2149 subst_constants (loc, insn, map, memonly)
2150 rtx *loc;
2151 rtx insn;
2152 struct inline_remap *map;
2153 int memonly;
2155 rtx x = *loc;
2156 register int i, j;
2157 register enum rtx_code code;
2158 register const char *format_ptr;
2159 int num_changes = num_validated_changes ();
2160 rtx new = 0;
2161 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2163 code = GET_CODE (x);
2165 switch (code)
2167 case PC:
2168 case CONST_INT:
2169 case CONST_DOUBLE:
2170 case SYMBOL_REF:
2171 case CONST:
2172 case LABEL_REF:
2173 case ADDRESS:
2174 return;
2176 #ifdef HAVE_cc0
2177 case CC0:
2178 if (! memonly)
2179 validate_change (insn, loc, map->last_cc0_value, 1);
2180 return;
2181 #endif
2183 case USE:
2184 case CLOBBER:
2185 /* The only thing we can do with a USE or CLOBBER is possibly do
2186 some substitutions in a MEM within it. */
2187 if (GET_CODE (XEXP (x, 0)) == MEM)
2188 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2189 return;
2191 case REG:
2192 /* Substitute for parms and known constants. Don't replace
2193 hard regs used as user variables with constants. */
2194 if (! memonly)
2196 int regno = REGNO (x);
2197 struct const_equiv_data *p;
2199 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2200 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2201 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2202 p->rtx != 0)
2203 && p->age >= map->const_age)
2204 validate_change (insn, loc, p->rtx, 1);
2206 return;
2208 case SUBREG:
2209 /* SUBREG applied to something other than a reg
2210 should be treated as ordinary, since that must
2211 be a special hack and we don't know how to treat it specially.
2212 Consider for example mulsidi3 in m68k.md.
2213 Ordinary SUBREG of a REG needs this special treatment. */
2214 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2216 rtx inner = SUBREG_REG (x);
2217 rtx new = 0;
2219 /* We can't call subst_constants on &SUBREG_REG (x) because any
2220 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2221 see what is inside, try to form the new SUBREG and see if that is
2222 valid. We handle two cases: extracting a full word in an
2223 integral mode and extracting the low part. */
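/* Sketch (made-up numbers): for (subreg:SI (reg:DI 70) 0) where
reg 70 is known to equal a CONST_DOUBLE, gen_lowpart_common can
produce the low-word CONST_INT, which is then proposed through
validate_change.  */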
2224 subst_constants (&inner, NULL_RTX, map, 0);
2226 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2227 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2228 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2229 new = operand_subword (inner, SUBREG_WORD (x), 0,
2230 GET_MODE (SUBREG_REG (x)));
2232 cancel_changes (num_changes);
2233 if (new == 0 && subreg_lowpart_p (x))
2234 new = gen_lowpart_common (GET_MODE (x), inner);
2236 if (new)
2237 validate_change (insn, loc, new, 1);
2239 return;
2241 break;
2243 case MEM:
2244 subst_constants (&XEXP (x, 0), insn, map, 0);
2246 /* If a memory address got spoiled, change it back. */
2247 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2248 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2249 cancel_changes (num_changes);
2250 return;
2252 case SET:
2254 /* Substitute constants in our source, and in any arguments to a
2255 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2256 itself. */
2257 rtx *dest_loc = &SET_DEST (x);
2258 rtx dest = *dest_loc;
2259 rtx src, tem;
2261 subst_constants (&SET_SRC (x), insn, map, memonly);
2262 src = SET_SRC (x);
2264 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2265 || GET_CODE (*dest_loc) == SUBREG
2266 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2268 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2270 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2271 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2273 dest_loc = &XEXP (*dest_loc, 0);
2276 /* Do substitute in the address of a destination in memory. */
2277 if (GET_CODE (*dest_loc) == MEM)
2278 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2280 /* Check for the case where DEST is a SUBREG, both it and the underlying
2281 register are no larger than one word, and the SUBREG has the wider mode.
2282 In that case, we are really setting the underlying register to the
2283 source converted to the mode of DEST. So indicate that. */
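/* Example (made-up numbers): for
(set (subreg:SI (reg:HI 80) 0) (reg:SI 81))
we record the equivalence for (reg:HI 80) itself, with the source
narrowed to HImode by gen_lowpart_if_possible, since the insn
really sets the whole underlying register.  */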
2284 if (GET_CODE (dest) == SUBREG
2285 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2286 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2287 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2288 <= GET_MODE_SIZE (GET_MODE (dest)))
2289 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2290 src)))
2291 src = tem, dest = SUBREG_REG (dest);
2293 /* If storing a recognizable value, save it for later recording. */
2294 if ((map->num_sets < MAX_RECOG_OPERANDS)
2295 && (CONSTANT_P (src)
2296 || (GET_CODE (src) == REG
2297 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2298 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2299 || (GET_CODE (src) == PLUS
2300 && GET_CODE (XEXP (src, 0)) == REG
2301 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2302 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2303 && CONSTANT_P (XEXP (src, 1)))
2304 || GET_CODE (src) == COMPARE
2305 #ifdef HAVE_cc0
2306 || dest == cc0_rtx
2307 #endif
2308 || (dest == pc_rtx
2309 && (src == pc_rtx || GET_CODE (src) == RETURN
2310 || GET_CODE (src) == LABEL_REF))))
2312 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2313 it will cause us to save the COMPARE with any constants
2314 substituted, which is what we want for later. */
2315 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2316 map->equiv_sets[map->num_sets++].dest = dest;
2319 return;
2321 default:
2322 break;
2325 format_ptr = GET_RTX_FORMAT (code);
2327 /* If the first operand is an expression, save its mode for later. */
2328 if (*format_ptr == 'e')
2329 op0_mode = GET_MODE (XEXP (x, 0));
2331 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2333 switch (*format_ptr++)
2335 case '0':
2336 break;
2338 case 'e':
2339 if (XEXP (x, i))
2340 subst_constants (&XEXP (x, i), insn, map, memonly);
2341 break;
2343 case 'u':
2344 case 'i':
2345 case 's':
2346 case 'w':
2347 case 't':
2348 break;
2350 case 'E':
2351 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2352 for (j = 0; j < XVECLEN (x, i); j++)
2353 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2355 break;
2357 default:
2358 abort ();
2362 /* If this is a commutative operation, move a constant to the second
2363 operand unless the second operand is already a CONST_INT. */
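/* E.g. once a constant has been substituted,
(plus:SI (const_int 4) (reg:SI 70)) is queued to become
(plus:SI (reg:SI 70) (const_int 4)).  */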
2364 if (! memonly
2365 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2366 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2368 rtx tem = XEXP (x, 0);
2369 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2370 validate_change (insn, &XEXP (x, 1), tem, 1);
2373 /* Simplify the expression in case we put in some constants. */
2374 if (! memonly)
2375 switch (GET_RTX_CLASS (code))
2377 case '1':
2378 if (op0_mode == MAX_MACHINE_MODE)
2379 abort ();
2380 new = simplify_unary_operation (code, GET_MODE (x),
2381 XEXP (x, 0), op0_mode);
2382 break;
2384 case '<':
2386 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2388 if (op_mode == VOIDmode)
2389 op_mode = GET_MODE (XEXP (x, 1));
2390 new = simplify_relational_operation (code, op_mode,
2391 XEXP (x, 0), XEXP (x, 1));
2392 #ifdef FLOAT_STORE_FLAG_VALUE
2393 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2395 enum machine_mode mode = GET_MODE (x);
2396 if (new == const0_rtx)
2397 new = CONST0_RTX (mode);
2398 else
2400 REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
2401 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2404 #endif
2405 break;
2408 case '2':
2409 case 'c':
2410 new = simplify_binary_operation (code, GET_MODE (x),
2411 XEXP (x, 0), XEXP (x, 1));
2412 break;
2414 case 'b':
2415 case '3':
2416 if (op0_mode == MAX_MACHINE_MODE)
2417 abort ();
2419 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2420 XEXP (x, 0), XEXP (x, 1),
2421 XEXP (x, 2));
2422 break;
2425 if (new)
2426 validate_change (insn, loc, new, 1);
2429 /* Show that the registers modified no longer contain known constants. We are
2430 called from note_stores with parts of the new insn. */
2432 static void
2433 mark_stores (dest, x, data)
2434 rtx dest;
2435 rtx x ATTRIBUTE_UNUSED;
2436 void *data ATTRIBUTE_UNUSED;
2438 int regno = -1;
2439 enum machine_mode mode = VOIDmode;
2441 /* DEST is always the innermost thing set, except in the case of
2442 SUBREGs of hard registers. */
2444 if (GET_CODE (dest) == REG)
2445 regno = REGNO (dest), mode = GET_MODE (dest);
2446 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2448 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2449 mode = GET_MODE (SUBREG_REG (dest));
2452 if (regno >= 0)
2454 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2455 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2456 int i;
2458 /* Ignore virtual stack var or virtual arg register since those
2459 are handled separately. */
2460 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
2461 && regno != VIRTUAL_STACK_VARS_REGNUM)
2462 for (i = regno; i <= last_reg; i++)
2463 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2464 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2468 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2469 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2470 that it points to the node itself, thus indicating that the node is its
2471 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2472 the given node is NULL, recursively descend the decl/block tree which
2473 it is the root of, and for each other ..._DECL or BLOCK node contained
2474 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2475 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2476 values to point to themselves. */
2478 static void
2479 set_block_origin_self (stmt)
2480 register tree stmt;
2482 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2484 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2487 register tree local_decl;
2489 for (local_decl = BLOCK_VARS (stmt);
2490 local_decl != NULL_TREE;
2491 local_decl = TREE_CHAIN (local_decl))
2492 set_decl_origin_self (local_decl); /* Potential recursion. */
2496 register tree subblock;
2498 for (subblock = BLOCK_SUBBLOCKS (stmt);
2499 subblock != NULL_TREE;
2500 subblock = BLOCK_CHAIN (subblock))
2501 set_block_origin_self (subblock); /* Recurse. */
2506 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2507 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2508 node so that it points to the node itself, thus indicating that the
2509 node represents its own (abstract) origin. Additionally, if the
2510 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2511 the decl/block tree of which the given node is the root, and for
2512 each other ..._DECL or BLOCK node contained therein whose
2513 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2514 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2515 point to themselves. */
2517 static void
2518 set_decl_origin_self (decl)
2519 register tree decl;
2521 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2523 DECL_ABSTRACT_ORIGIN (decl) = decl;
2524 if (TREE_CODE (decl) == FUNCTION_DECL)
2526 register tree arg;
2528 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2529 DECL_ABSTRACT_ORIGIN (arg) = arg;
2530 if (DECL_INITIAL (decl) != NULL_TREE
2531 && DECL_INITIAL (decl) != error_mark_node)
2532 set_block_origin_self (DECL_INITIAL (decl));
2537 /* Given a pointer to some BLOCK node, and a boolean value to set the
2538 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2539 the given block, and for all local decls and all local sub-blocks
2540 (recursively) which are contained therein. */
2542 static void
2543 set_block_abstract_flags (stmt, setting)
2544 register tree stmt;
2545 register int setting;
2547 register tree local_decl;
2548 register tree subblock;
2550 BLOCK_ABSTRACT (stmt) = setting;
2552 for (local_decl = BLOCK_VARS (stmt);
2553 local_decl != NULL_TREE;
2554 local_decl = TREE_CHAIN (local_decl))
2555 set_decl_abstract_flags (local_decl, setting);
2557 for (subblock = BLOCK_SUBBLOCKS (stmt);
2558 subblock != NULL_TREE;
2559 subblock = BLOCK_CHAIN (subblock))
2560 set_block_abstract_flags (subblock, setting);
2563 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2564 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2565 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2566 set the abstract flags for all of the parameters, local vars, local
2567 blocks and sub-blocks (recursively) to the same setting. */
2569 void
2570 set_decl_abstract_flags (decl, setting)
2571 register tree decl;
2572 register int setting;
2574 DECL_ABSTRACT (decl) = setting;
2575 if (TREE_CODE (decl) == FUNCTION_DECL)
2577 register tree arg;
2579 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2580 DECL_ABSTRACT (arg) = setting;
2581 if (DECL_INITIAL (decl) != NULL_TREE
2582 && DECL_INITIAL (decl) != error_mark_node)
2583 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2587 /* Output the assembly language code for the function FNDECL
2588 from its DECL_SAVED_INSNS. Used for inline functions that are output
2589 at the end of compilation instead of where they appeared in the source. */
2591 void
2592 output_inline_function (fndecl)
2593 tree fndecl;
2595 struct function *old_cfun = cfun;
2596 struct function *f = DECL_SAVED_INSNS (fndecl);
2598 cfun = f;
2599 current_function_decl = fndecl;
2600 clear_emit_caches ();
2602 /* Things we allocate from here on are part of this function, not
2603 permanent. */
2604 temporary_allocation ();
2606 set_new_last_label_num (f->inl_max_label_num);
2608 /* We must have already output DWARF debugging information for the
2609 original (abstract) inline function declaration/definition, so
2610 we want to make sure that the debugging information we generate
2611 for this special instance of the inline function refers back to
2612 the information we already generated. To make sure that happens,
2613 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
2614 node (and for all of the local ..._DECL nodes which are its children)
2615 so that they all point to themselves. */
2617 set_decl_origin_self (fndecl);
2619 /* We're not deferring this any longer. */
2620 DECL_DEFER_OUTPUT (fndecl) = 0;
2622 /* We can't inline this anymore. */
2623 f->inlinable = 0;
2624 DECL_INLINE (fndecl) = 0;
2626 /* Compile this function all the way down to assembly code. */
2627 rest_of_compilation (fndecl);
2629 cfun = old_cfun;
2630 current_function_decl = old_cfun ? old_cfun->decl : 0;