/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
extern struct obstack *function_maybepermanent_obstack;
/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
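/* A worked example (illustrative, not from the original source):
   CEIL_ROUND (13, 8) = (13 + 7) & ~7 = 20 & ~7 = 16, while
   CEIL_ROUND (16, 8) = 23 & ~7 = 16, so values already at the
   alignment are left unchanged.  ALIGN must be a power of two.  */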
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
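
/* A worked example (illustrative, not from the original source): for a
   function with two arguments, -Os gives a threshold of
   1 + (3 * 2) / 2 = 4 insns, while the normal case allows
   8 * (8 + 2) = 80 insns.  */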
static rtvec initialize_for_inline	PROTO((tree));
static void note_modified_parmregs	PROTO((rtx, rtx, void *));
static void integrate_parm_decls	PROTO((tree, struct inline_remap *,
					       rtvec));
static tree integrate_decl_tree		PROTO((tree,
					       struct inline_remap *));
static void subst_constants		PROTO((rtx *, rtx,
					       struct inline_remap *, int));
static void set_block_origin_self	PROTO((tree));
static void set_decl_origin_self	PROTO((tree));
static void set_block_abstract_flags	PROTO((tree, int));
static void process_reg_param		PROTO((struct inline_remap *, rtx,
					       rtx));
void set_decl_abstract_flags		PROTO((tree, int));
static tree copy_and_set_decl_abstract_origin PROTO((tree));
static rtx expand_inline_function_eh_labelmap PROTO((rtx));
static void mark_stores			PROTO((rtx, rtx, void *));
/* The maximum number of instructions accepted for inlining a
   function.  Increasing values mean more aggressive inlining.
   This affects currently only functions explicitly marked as
   inline (or methods defined within the class definition for C++).
   The default value of 10000 is arbitrary but high to match the
   previously unlimited gcc capabilities.  */

int inline_max_insns = 10000;
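
/* As noted below in function_cannot_inline_p, this limit is set from
   the command line via -finline-limit-<n>; e.g. -finline-limit-600
   (an illustrative value, not from the original source) would make
   inlining far less aggressive than the 10000-insn default.  */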
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the struct function for the inlined function.  */

static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
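
/* A minimal usage sketch (illustrative, not from the original source):

     rtx lab = get_label_from_map (map, CODE_LABEL_NUMBER (orig));

   Repeated calls with the same index return the same label rtx, so a
   label is created on first use instead of when label_map is allocated;
   the CODE_LABEL and NOTE handling in expand_inline_function below rely
   on exactly this.  */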
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     inline_max_insns (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		   ? (inline_max_insns
		      + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		   : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");
#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif
  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }
  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	return N_("function with complex parameters cannot be inline");
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return N_("inline functions not supported for this return value type");

  return 0;
}
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline_nocopy'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
/* Copy NODE (as with copy_node).  NODE must be a DECL.  Set the
   DECL_ABSTRACT_ORIGIN for the new decl accordingly.  */

static tree
copy_and_set_decl_abstract_origin (node)
     tree node;
{
  tree copy = copy_node (node);
  if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
    /* That means that NODE already had a DECL_ABSTRACT_ORIGIN.  (This
       situation occurs if we inline a function which itself made
       calls to inline functions.)  Since DECL_ABSTRACT_ORIGIN is the
       most distant ancestor, we don't have to do anything here.  */
    ;
  else
    /* The most distant ancestor must be NODE.  */
    DECL_ABSTRACT_ORIGIN (copy) = node;

  return copy;
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	/* Record what interesting things happen to our parameters.  */
	note_stores (PATTERN (insn), note_modified_parmregs, NULL);
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  current_function->inl_max_label_num = max_label_num ();
  current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
  current_function->original_arg_vector = argvec;
  current_function->original_decl_initial = DECL_INITIAL (fndecl);
  DECL_SAVED_INSNS (fndecl) = current_function;
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
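
/* For example (illustrative, not from the original source), this matches
   an address such as

     (plus:SI (reg:SI virtual-stack-vars) (const_int 8))

   whose base is one of the virtual registers, but not a PLUS whose base
   is an ordinary pseudo or whose offset is itself a register.  */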
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */

static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
/* Used by duplicate_eh_handlers to map labels for the exception table.  */
static struct inline_remap *eif_eh_map;

static rtx
expand_inline_function_eh_labelmap (label)
     rtx label;
{
  int index = CODE_LABEL_NUMBER (label);
  return get_label_from_map (eif_eh_map, index);
}
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();
  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_SET_IN_STRUCT_P (stack_slot,
			       AGGREGATE_TYPE_P (TREE_TYPE (arg)));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) alloca (inl_max_uid * sizeof (rtx));
  bzero ((char *) map->insn_map, inl_max_uid * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;
  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
  map->regno_pointer_align = inl_f->emit->regno_pointer_align;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Figure out where the blocks are if we're going to have to insert
     new BLOCKs into the existing block tree.  */
  if (current_function->x_whole_function_mode_p)
    find_loop_tree_blocks ();
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_FRAME_SIZE (fndecl) != 0)
	    copy_rtx_and_substitute (virtual_stack_vars_rtx, map, 0);

	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }
  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode the function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else
    abort ();
  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings (0);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  bzero ((char *) &map->label_map[min_labelno],
	 (max_labelno - min_labelno) * sizeof (rtx));

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* If the inline fn needs eh context, make sure that
	     the current fn has one.  */
	  if (GET_CODE (pattern) == USE
	      && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
	    get_eh_context ();

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;
	case CALL_INSN:
	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */
	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;
	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.
	     Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
	     deleted these in the copy used for continuing compilation,
	     not the copy used for inlining).  */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
		{
		  rtx label
		    = get_label_from_map (map, NOTE_EH_HANDLER (copy));

		  /* We have to duplicate the handlers for the original.  */
		  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		    {
		      /* We need to duplicate the handlers for the EH region
			 and we need to indicate where the label map is.  */
		      eif_eh_map = map;
		      duplicate_eh_handlers (NOTE_EH_HANDLER (copy),
					     CODE_LABEL_NUMBER (label),
					     expand_inline_function_eh_labelmap);
		    }

		  /* We have to forward these both to match the new exception
		     region.  */
		  NOTE_EH_HANDLER (copy) = CODE_LABEL_NUMBER (label);
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&tem, NULL_RTX, map, 0);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }
  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
				   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
  inline_function_decl = 0;

  if (current_function->x_whole_function_mode_p)
    /* Insert the block into the already existing block-tree.  */
    retrofit_block (block, map->insns_at_start);
  else
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode because
       the superblocks have not been created yet.  */
    insert_block (block);

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      MEM_SET_IN_STRUCT_P (target, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  if (map)
    VARRAY_FREE (map->const_equiv_varray);
  inlining = inlining_previous;

  return target;
}
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
				       TREE_TYPE (tail));
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
      DECL_CONTEXT (decl) = current_function_decl;
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static tree
integrate_decl_tree (let, map)
     tree let;
     struct inline_remap *map;
{
  tree t;
  tree new_block;
  tree *next;

  new_block = make_node (BLOCK);
  next = &BLOCK_VARS (new_block);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_and_set_decl_abstract_origin (t);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map, 1);

	  /* Fully instantiate the address with the equivalent form so that the
	     debugging information contains the actual register, instead of the
	     virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map, 1);
	  apply_change_group ();
	}

      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;

      if (DECL_LANG_SPECIFIC (d))
	copy_lang_decl (d);

      /* Set the context for the new declaration.  */
      if (!DECL_CONTEXT (t))
	/* Globals stay global.  */
	;
      else if (DECL_CONTEXT (t) != map->fndecl)
	/* Things that weren't in the scope of the function we're
	   inlining from aren't in the scope we're inlining into,
	   either.  */
	;
      else if (TREE_STATIC (t))
	/* Function-scoped static variables should stay in the original
	   function.  */
	;
      else
	/* Ordinary automatic local variables are now in the scope of
	   the new function.  */
	DECL_CONTEXT (d) = current_function_decl;

      /* Add this declaration to the list of variables in the new
	 block.  */
      *next = d;
      next = &TREE_CHAIN (d);
    }

  next = &BLOCK_SUBBLOCKS (new_block);
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    {
      *next = integrate_decl_tree (t, map);
      BLOCK_SUPERCONTEXT (*next) = new_block;
      next = &BLOCK_CHAIN (*next);
    }

  TREE_USED (new_block) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (new_block) = let;

  return new_block;
}
/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (orig, map, for_lhs)
     register rtx orig;
     struct inline_remap *map;
     int for_lhs;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER
	  || (map->integrating
	      && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));

#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
		   || (map->integrating
		       && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
			   == orig)))
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_SAVED_INSNS (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno],
				STACK_BOUNDARY / BITS_PER_UNIT);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      /* This is a reference to the function return value.  If
		 the function doesn't have a return value, error.  If the
		 mode doesn't agree, and it ain't BLKmode, make a SUBREG.  */
	      if (map->inline_target == 0)
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	      else if (GET_MODE (map->inline_target) != BLKmode
		       && mode != GET_MODE (map->inline_target))
		return gen_lowpart (mode, map->inline_target);
	      else
		return map->inline_target;
	    }
	  return orig;
	}

      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];
1647 case SUBREG:
1648 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1649 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1650 if (GET_CODE (copy) == SUBREG)
1651 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
1652 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1653 else if (GET_CODE (copy) == CONCAT)
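	  /* The inner copy expanded into a CONCAT (for instance, a complex
	     value represented as a pair of its real and imaginary parts);
	     pick out whichever part this SUBREG actually refers to.  */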
	  rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);

	  if (GET_MODE (retval) == GET_MODE (orig))
	    return retval;
	  else
	    return gen_rtx_SUBREG (GET_MODE (orig), retval,
				   (SUBREG_WORD (orig) %
				    (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
				     / (unsigned) UNITS_PER_WORD)));
	}
      else
	return gen_rtx_SUBREG (GET_MODE (orig), copy,
			       SUBREG_WORD (orig));

    case ADDRESSOF:
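      /* Rebuild the ADDRESSOF around the remapped operand, then make its
	 register number refer to the copy's pseudo, allocating a fresh
	 pseudo if the original one has not been remapped yet.  */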
      copy = gen_rtx_ADDRESSOF (mode,
				copy_rtx_and_substitute (XEXP (orig, 0),
							 map, for_lhs),
				0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
	regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
	{
	  temp = XEXP (orig, 0);
	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (map->regno_pointer_flag[regno])
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	  regno = REGNO (map->reg_map[regno]);
	}
      ADDRESSOF_REGNO (copy) = regno;
      return copy;

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
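      /* A CLOBBER writes its operand, so when copying a CLOBBER treat the
	 operand as a store destination (pass a nonzero FOR_LHS).  */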
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

    case CODE_LABEL:
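      /* Map the label to its copy in the new function; get_label_from_map
	 creates the copied label on first use.  */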
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
	 inlined call will be referring to our nonlocal goto handler.
	 So make sure we create one for this block; we normally would
	 not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = inlining ? inlining : current_function;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
	  if (inlining)
	    {
	      rtx temp = force_const_mem (const_mode,
					  copy_rtx_and_substitute (constant,
								   map, 0));
#if 0
	      /* Legitimizing the address here is incorrect.

		 Since we had a SYMBOL_REF before, we can assume it is valid
		 to have one in this position in the insn.

		 Also, change_address may create new registers.  These
		 registers will not have valid reg_map entries.  This can
		 cause try_constants () to fail because it assumes that all
		 registers in the rtx have valid reg_map entries, and it may
		 end up replacing one of these new registers with junk.  */

	      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
		temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif
	      temp = XEXP (temp, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) != GET_MODE (orig))
		temp = convert_memory_address (GET_MODE (orig), temp);
#endif
	      return temp;
	    }
	  else if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      else if (SYMBOL_REF_NEED_ADJUST (orig))
	{
	  eif_eh_map = map;
	  return rethrow_symbol_map (orig,
				     expand_inline_function_eh_labelmap);
	}

      return orig;
    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);
    case CONST:
      /* Make new constant pool entry for a constant
	 that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
	abort ();
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  copy->volatil = orig->volatil;
	  XSTR (copy, 0) = XSTR (orig, 0);
	  XSTR (copy, 1) = XSTR (orig, 1);
	  XINT (copy, 2) = XINT (orig, 2);
	  XVEC (copy, 3) = map->copy_asm_operands_vector;
	  XVEC (copy, 4) = map->copy_asm_constraints_vector;
	  XSTR (copy, 5) = XSTR (orig, 5);
	  XINT (copy, 6) = XINT (orig, 6);
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	return
	  gen_rtx_CALL
	    (GET_MODE (orig),
	     gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			  copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						   map, 0)),
	     copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif
    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;
    case MEM:
      if (inlining
	  && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
	{
	  enum machine_mode const_mode
	    = get_pool_mode_for_function (inlining, XEXP (orig, 0));
	  rtx constant
	    = get_pool_constant_for_function (inlining, XEXP (orig, 0));

	  constant = copy_rtx_and_substitute (constant, map, 0);

	  /* If this was an address of a constant pool entry that itself
	     had to be placed in the constant pool, it might not be a
	     valid address.  So the recursive call might have turned it
	     into a register.  In that case, it isn't a constant any
	     more, so return it.  This has the potential of changing a
	     MEM into a REG, but we'll assume that it is safe.  */
	  if (! CONSTANT_P (constant))
	    return constant;

	  return validize_mem (force_const_mem (const_mode, constant));
	}

      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
      MEM_COPY_ATTRIBUTES (copy, orig);
      MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
      RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
      return copy;
    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

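  /* Walk the operands as described by the rtx format string: 'e' is a
     subexpression, 'E' a vector of subexpressions, 'u' a reference to
     another insn, and '0', 'w', 'i', 's' and 't' are scalar fields that
     are copied verbatim.  */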
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  /* Copy this through the wide int field; that's safest.  */
	  X0WINT (copy, i) = X0WINT (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* The following clause is a hack to make the case work where
		 GNU C++ reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (loc, insn, map, memonly)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
     int memonly;
{
  rtx x = *loc;
  register int i, j;
  register enum rtx_code code;
  register const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
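      /* cc0 always holds the value set by the immediately preceding insn,
	 which try_constants recorded in last_cc0_value.  */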
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;
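
	  /* Use the recorded equivalence only while it is still valid:
	     its age must be at least the map's current constant age.  */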
	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map, 0);

	  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	      && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
	    new = operand_subword (inner, SUBREG_WORD (x), 0,
				   GET_MODE (SUBREG_REG (x)));

	  cancel_changes (num_changes);
	  if (new == 0 && subreg_lowpart_p (x))
	    new = gen_lowpart_common (GET_MODE (x), inner);

	  if (new)
	    validate_change (insn, loc, new, 1);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;
    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);
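
	/* Peel off any ZERO_EXTRACT, SUBREG or STRICT_LOW_PART wrappers to
	   reach the real destination; the position operands of a
	   ZERO_EXTRACT may themselves contain substitutable registers.  */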
	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider mode.
	   In that case, we are really setting the underlying register to the
	   source converted to the mode of DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value, save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (GET_CODE (src) == REG
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && GET_CODE (XEXP (src, 0)) == REG
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a
	       COMPARE it will cause us to save the COMPARE with any
	       constants substituted, which is what we want for later.  */
	    map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
	    map->equiv_sets[map->num_sets++].dest = dest;
	  }
      }
      return;
    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map, memonly);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 't':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

	  break;

	default:
	  abort ();
	}
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
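  /* For example, (plus (const_int 4) (reg 65)) becomes
     (plus (reg 65) (const_int 4)), the canonical form that later passes
     and the insn recognizers expect.  */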
  if (! memonly
      && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
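  /* The rtx class gives the arity: '1' is unary, '2' and 'c' are binary
     ('c' commutative), '<' is a comparison, and 'b' (bit-field operation)
     and '3' are ternary.  */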
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case '1':
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case '<':
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));
	  new = simplify_relational_operation (code, op_mode,
					       XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
	  if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	    new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
		   : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
						   GET_MODE (x)));
#endif
	  break;
	}

      case '2':
      case 'c':
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case 'b':
      case '3':
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					  XEXP (x, 0), XEXP (x, 1),
					  XEXP (x, 2));
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

static void
mark_stores (dest, x, data)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
		      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      int i;
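
      /* A store to a hard register in a multi-word mode clobbers
	 HARD_REGNO_NREGS consecutive hard registers; invalidate each of
	 them.  */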
      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && regno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = regno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}

/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  struct function *curf = current_function;
  struct function *f = DECL_SAVED_INSNS (fndecl);

  current_function = f;
  current_function_decl = fndecl;
  clear_emit_caches ();

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  set_new_last_label_num (f->inl_max_label_num);

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  f->inlinable = 0;
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function = curf;
  current_function_decl = curf ? curf->decl : 0;
}