1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "insn-config.h"
32 #include "insn-flags.h"
33 #include "expr.h"
34 #include "output.h"
35 #include "recog.h"
36 #include "integrate.h"
37 #include "real.h"
38 #include "except.h"
39 #include "function.h"
40 #include "toplev.h"
41 #include "intl.h"
42 #include "loop.h"
44 #include "obstack.h"
45 #define obstack_chunk_alloc xmalloc
46 #define obstack_chunk_free free
48 extern struct obstack *function_maybepermanent_obstack;
50 /* Round VALUE up to the next highest integer that meets the
51 alignment ALIGN. */
52 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
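/* Added illustration (not in the original file): with the usual power-of-two
   ALIGN, CEIL_ROUND (13, 8) evaluates to (13 + 7) & ~7 == 16, i.e. 13 rounded
   up to the next multiple of 8.  The bit-mask trick only works when ALIGN is
   a power of two.  */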
54 /* Default max number of insns a function can have and still be inline.
55 This is overridden on RISC machines. */
56 #ifndef INTEGRATE_THRESHOLD
57 /* Inlining small functions might save more space than not inlining at
58 all. Assume 1 instruction for the call and 1.5 insns per argument. */
59 #define INTEGRATE_THRESHOLD(DECL) \
60 (optimize_size \
61 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
62 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
63 #endif
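/* Added worked example (hypothetical numbers, not from the original source):
   for a function with 4 arguments the default threshold is
   1 + (3 * 4) / 2 = 7 insns when optimizing for size, and
   8 * (8 + 4) = 96 insns otherwise.  */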
65 /* Decide whether a function with a target specific attribute
66 attached can be inlined. By default we disallow this. */
67 #ifndef FUNCTION_ATTRIBUTE_INLINABLE_P
68 #define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) 0
69 #endif
71 static rtvec initialize_for_inline PARAMS ((tree));
72 static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
73 static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
74 rtvec));
75 static tree integrate_decl_tree PARAMS ((tree,
76 struct inline_remap *));
77 static void subst_constants PARAMS ((rtx *, rtx,
78 struct inline_remap *, int));
79 static void set_block_origin_self PARAMS ((tree));
80 static void set_block_abstract_flags PARAMS ((tree, int));
81 static void process_reg_param PARAMS ((struct inline_remap *, rtx,
82 rtx));
83 void set_decl_abstract_flags PARAMS ((tree, int));
84 static rtx expand_inline_function_eh_labelmap PARAMS ((rtx));
85 static void mark_stores PARAMS ((rtx, rtx, void *));
86 static void save_parm_insns PARAMS ((rtx, rtx));
87 static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
88 rtx));
89 static int compare_blocks PARAMS ((const PTR, const PTR));
90 static int find_block PARAMS ((const PTR, const PTR));
92 /* The maximum number of instructions accepted for inlining a
93 function. Increasing values mean more aggressive inlining.
94 Currently this affects only functions explicitly marked as
95 inline (or methods defined within the class definition for C++).
96 The default value of 10000 is arbitrary but high to match the
97 previously unlimited gcc capabilities. */
99 int inline_max_insns = 10000;
101 /* Used by copy_rtx_and_substitute; this indicates whether the function is
102 called for the purpose of inlining or some other purpose (e.g. loop
103 unrolling). This affects how constant pool references are handled.
104 This variable points to the struct function for the function being inlined. */
105 static struct function *inlining = 0;
107 /* Returns the Ith entry in the label_map contained in MAP. If the
108 Ith entry has not yet been set, return a fresh label. This function
109 performs a lazy initialization of label_map, thereby avoiding huge memory
110 explosions when the label_map gets very large. */
113 get_label_from_map (map, i)
114 struct inline_remap *map;
115 int i;
117 rtx x = map->label_map[i];
119 if (x == NULL_RTX)
120 x = map->label_map[i] = gen_label_rtx ();
122 return x;
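/* Added note: callers index label_map with the CODE_LABEL_NUMBER of a label
   from the function being copied; see expand_inline_function_eh_labelmap and
   the CODE_LABEL case in copy_insn_list below for typical uses.  */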
125 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
126 is safe and reasonable to integrate into other functions.
127 Nonzero means value is a warning msgid with a single %s
128 for the function's name. */
130 const char *
131 function_cannot_inline_p (fndecl)
132 register tree fndecl;
134 register rtx insn;
135 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
137 /* For functions marked as inline increase the maximum size to
138 inline_max_insns (-finline-limit-<n>). For regular functions
139 use the limit given by INTEGRATE_THRESHOLD. */
141 int max_insns = (DECL_INLINE (fndecl))
142 ? (inline_max_insns
143 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
144 : INTEGRATE_THRESHOLD (fndecl);
146 register int ninsns = 0;
147 register tree parms;
148 rtx result;
150 /* No inlines with varargs. */
151 if ((last && TREE_VALUE (last) != void_type_node)
152 || current_function_varargs)
153 return N_("varargs function cannot be inline");
155 if (current_function_calls_alloca)
156 return N_("function using alloca cannot be inline");
158 if (current_function_calls_setjmp)
159 return N_("function using setjmp cannot be inline");
161 if (current_function_contains_functions)
162 return N_("function with nested functions cannot be inline");
164 if (forced_labels)
165 return
166 N_("function with label addresses used in initializers cannot inline");
168 if (current_function_cannot_inline)
169 return current_function_cannot_inline;
171 /* If it's not even close, don't even look. */
172 if (get_max_uid () > 3 * max_insns)
173 return N_("function too large to be inline");
175 #if 0
176 /* Don't inline functions which do not specify a function prototype and
177 have a BLKmode argument or take the address of a parameter. */
178 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
180 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
181 TREE_ADDRESSABLE (parms) = 1;
182 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
183 return N_("no prototype, and parameter address used; cannot be inline");
185 #endif
187 /* We can't inline functions that return structures
188 the old-fashioned PCC way, copying into a static block. */
189 if (current_function_returns_pcc_struct)
190 return N_("inline functions not supported for this return value type");
192 /* We can't inline functions that return structures of varying size. */
193 if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
194 && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
195 return N_("function with varying-size return value cannot be inline");
197 /* Cannot inline a function with a varying size argument or one that
198 receives a transparent union. */
199 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
201 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
202 return N_("function with varying-size parameter cannot be inline");
203 else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
204 && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
205 return N_("function with transparent union parameter cannot be inline");
208 if (get_max_uid () > max_insns)
210 for (ninsns = 0, insn = get_first_nonparm_insn ();
211 insn && ninsns < max_insns;
212 insn = NEXT_INSN (insn))
213 if (INSN_P (insn))
214 ninsns++;
216 if (ninsns >= max_insns)
217 return N_("function too large to be inline");
220 /* We will not inline a function which uses computed goto. The addresses of
221 its local labels, which may be tucked into global storage, are of course
222 not constant across instantiations, which causes unexpected behaviour. */
223 if (current_function_has_computed_jump)
224 return N_("function with computed jump cannot inline");
226 /* We cannot inline a nested function that jumps to a nonlocal label. */
227 if (current_function_has_nonlocal_goto)
228 return N_("function with nonlocal goto cannot be inline");
230 /* This is a hack, until the inliner is taught about eh regions at
231 the start of the function. */
232 for (insn = get_insns ();
233 insn
234 && ! (GET_CODE (insn) == NOTE
235 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
236 insn = NEXT_INSN (insn))
238 if (insn && GET_CODE (insn) == NOTE
239 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
240 return N_("function with complex parameters cannot be inline");
243 /* We can't inline functions that return a PARALLEL rtx. */
244 result = DECL_RTL (DECL_RESULT (fndecl));
245 if (result && GET_CODE (result) == PARALLEL)
246 return N_("inline functions not supported for this return value type");
248 /* If the function has a target specific attribute attached to it,
249 then we assume that we should not inline it. This can be overridden
250 by the target if it defines FUNCTION_ATTRIBUTE_INLINABLE_P. */
251 if (DECL_MACHINE_ATTRIBUTES (fndecl)
252 && ! FUNCTION_ATTRIBUTE_INLINABLE_P (fndecl))
253 return N_("function with target specific attribute(s) cannot be inlined");
255 return NULL;
258 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
259 Zero for a reg that isn't a parm's home.
260 Only reg numbers less than max_parm_reg are mapped here. */
261 static tree *parmdecl_map;
263 /* In save_for_inline, nonzero if past the parm-initialization insns. */
264 static int in_nonparm_insns;
266 /* Subroutine for `save_for_inline'. Performs initialization
267 needed to save FNDECL's insns and info for future inline expansion. */
269 static rtvec
270 initialize_for_inline (fndecl)
271 tree fndecl;
273 int i;
274 rtvec arg_vector;
275 tree parms;
277 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
278 memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
279 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
281 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
282 parms;
283 parms = TREE_CHAIN (parms), i++)
285 rtx p = DECL_RTL (parms);
287 /* If we have (mem (addressof (mem ...))), use the inner MEM since
288 otherwise the copy_rtx call below will not unshare the MEM since
289 it shares ADDRESSOF. */
290 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
291 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
292 p = XEXP (XEXP (p, 0), 0);
294 RTVEC_ELT (arg_vector, i) = p;
296 if (GET_CODE (p) == REG)
297 parmdecl_map[REGNO (p)] = parms;
298 else if (GET_CODE (p) == CONCAT)
300 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
301 rtx pimag = gen_imagpart (GET_MODE (preal), p);
303 if (GET_CODE (preal) == REG)
304 parmdecl_map[REGNO (preal)] = parms;
305 if (GET_CODE (pimag) == REG)
306 parmdecl_map[REGNO (pimag)] = parms;
309 /* This flag is cleared later
310 if the function ever modifies the value of the parm. */
311 TREE_READONLY (parms) = 1;
314 return arg_vector;
317 /* Copy NODE (which must be a DECL, but not a PARM_DECL). The DECL
318 originally was in the FROM_FN, but now it will be in the
319 TO_FN. */
321 tree
322 copy_decl_for_inlining (decl, from_fn, to_fn)
323 tree decl;
324 tree from_fn;
325 tree to_fn;
327 tree copy;
329 /* Copy the declaration. */
330 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
332 /* For a parameter, we must make an equivalent VAR_DECL, not a
333 new PARM_DECL. */
334 copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
335 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
336 TREE_READONLY (copy) = TREE_READONLY (decl);
337 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
339 else
341 copy = copy_node (decl);
342 if (DECL_LANG_SPECIFIC (copy))
343 copy_lang_decl (copy);
345 /* TREE_ADDRESSABLE isn't used to indicate that a label's
346 address has been taken; it's for internal bookkeeping in
347 expand_goto_internal. */
348 if (TREE_CODE (copy) == LABEL_DECL)
349 TREE_ADDRESSABLE (copy) = 0;
352 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
353 declaration inspired this copy. */
354 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
356 /* The new variable/label has no RTL, yet. */
357 DECL_RTL (copy) = NULL_RTX;
359 /* These args would always appear unused, if not for this. */
360 TREE_USED (copy) = 1;
362 /* Set the context for the new declaration. */
363 if (!DECL_CONTEXT (decl))
364 /* Globals stay global. */
366 else if (DECL_CONTEXT (decl) != from_fn)
367 /* Things that weren't in the scope of the function we're inlining
368 from aren't in the scope we're inlining into, either. */
370 else if (TREE_STATIC (decl))
371 /* Function-scoped static variables should stay in the original
372 function. */
374 else
375 /* Ordinary automatic local variables are now in the scope of the
376 new function. */
377 DECL_CONTEXT (copy) = to_fn;
379 return copy;
382 /* Make the insns and PARM_DECLs of the current function permanent
383 and record other information in DECL_SAVED_INSNS to allow inlining
384 of this function in subsequent calls.
386 This routine need not copy any insns because we are not going
387 to immediately compile the insns in the insn chain. There
388 are two cases when we would compile the insns for FNDECL:
389 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
390 be output at the end of other compilation, because somebody took
391 its address. In the first case, the insns of FNDECL are copied
392 as it is expanded inline, so FNDECL's saved insns are not
393 modified. In the second case, FNDECL is used for the last time,
394 so modifying the rtl is not a problem.
396 We don't have to worry about FNDECL being inline expanded by
397 other functions which are written at the end of compilation
398 because flag_no_inline is turned on when we begin writing
399 functions at the end of compilation. */
401 void
402 save_for_inline (fndecl)
403 tree fndecl;
405 rtx insn;
406 rtvec argvec;
407 rtx first_nonparm_insn;
409 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
410 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
411 Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
412 for the parms, prior to elimination of virtual registers.
413 These values are needed for substituting parms properly. */
415 parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
417 /* Make and emit a return-label if we have not already done so. */
419 if (return_label == 0)
421 return_label = gen_label_rtx ();
422 emit_label (return_label);
425 argvec = initialize_for_inline (fndecl);
427 /* If there are insns that copy parms from the stack into pseudo registers,
428 those insns are not copied. `expand_inline_function' must
429 emit the correct code to handle such things. */
431 insn = get_insns ();
432 if (GET_CODE (insn) != NOTE)
433 abort ();
435 /* Get the insn which signals the end of parameter setup code. */
436 first_nonparm_insn = get_first_nonparm_insn ();
438 /* Now just scan the chain of insns to see what happens to our
439 PARM_DECLs. If a PARM_DECL is used but never modified, we
440 can substitute its rtl directly when expanding inline (and
441 perform constant folding when its incoming value is constant).
442 Otherwise, we have to copy its value into a new register and track
443 the new register's life. */
444 in_nonparm_insns = 0;
445 save_parm_insns (insn, first_nonparm_insn);
447 cfun->inl_max_label_num = max_label_num ();
448 cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
449 cfun->original_arg_vector = argvec;
450 cfun->original_decl_initial = DECL_INITIAL (fndecl);
451 cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
452 DECL_SAVED_INSNS (fndecl) = cfun;
454 /* Clean up. */
455 free (parmdecl_map);
458 /* Scan the chain of insns to see what happens to our PARM_DECLs. If a
459 PARM_DECL is used but never modified, we can substitute its rtl directly
460 when expanding inline (and perform constant folding when its incoming
461 value is constant). Otherwise, we have to copy its value into a new
462 register and track the new register's life. */
464 static void
465 save_parm_insns (insn, first_nonparm_insn)
466 rtx insn;
467 rtx first_nonparm_insn;
469 if (insn == NULL_RTX)
470 return;
472 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
474 if (insn == first_nonparm_insn)
475 in_nonparm_insns = 1;
477 if (INSN_P (insn))
479 /* Record what interesting things happen to our parameters. */
480 note_stores (PATTERN (insn), note_modified_parmregs, NULL);
482 /* If this is a CALL_PLACEHOLDER insn then we need to look into the
483 three attached sequences: normal call, sibling call and tail
484 recursion. */
485 if (GET_CODE (insn) == CALL_INSN
486 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
488 int i;
490 for (i = 0; i < 3; i++)
491 save_parm_insns (XEXP (PATTERN (insn), i),
492 first_nonparm_insn);
498 /* Note whether a parameter is modified or not. */
500 static void
501 note_modified_parmregs (reg, x, data)
502 rtx reg;
503 rtx x ATTRIBUTE_UNUSED;
504 void *data ATTRIBUTE_UNUSED;
506 if (GET_CODE (reg) == REG && in_nonparm_insns
507 && REGNO (reg) < max_parm_reg
508 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
509 && parmdecl_map[REGNO (reg)] != 0)
510 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
513 /* Unfortunately, we need a global copy of const_equiv map for communication
514 with a function called from note_stores. Be *very* careful that this
515 is used properly in the presence of recursion. */
517 varray_type global_const_equiv_varray;
519 #define FIXED_BASE_PLUS_P(X) \
520 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
521 && GET_CODE (XEXP (X, 0)) == REG \
522 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
523 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
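/* Added note: FIXED_BASE_PLUS_P therefore matches rtl of the form
   (plus (reg VIRTUAL-REG) (const_int N)), a constant offset from one of the
   virtual base registers (incoming args, stack vars, etc.).  */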
525 /* Called to set up a mapping for the case where a parameter is in a
526 register. If it is read-only and our argument is a constant, set up the
527 constant equivalence.
529 If LOC has REG_USERVAR_P set (the usual case), COPY must also have that flag set
530 if it is a register.
532 Also, don't allow hard registers here; they might not be valid when
533 substituted into insns. */
534 static void
535 process_reg_param (map, loc, copy)
536 struct inline_remap *map;
537 rtx loc, copy;
539 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
540 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
541 && ! REG_USERVAR_P (copy))
542 || (GET_CODE (copy) == REG
543 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
545 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
546 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
547 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
548 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
549 copy = temp;
551 map->reg_map[REGNO (loc)] = copy;
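/* Added note: process_reg_param is called below from expand_inline_function,
   both for parameters that live in a single REG and, twice, for the real and
   imaginary halves of a CONCAT (complex) parameter.  */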
554 /* Used by duplicate_eh_handlers to map labels for the exception table. */
555 static struct inline_remap *eif_eh_map;
557 static rtx
558 expand_inline_function_eh_labelmap (label)
559 rtx label;
561 int index = CODE_LABEL_NUMBER (label);
562 return get_label_from_map (eif_eh_map, index);
565 /* Compare two BLOCKs for qsort. The key we sort on is the
566 BLOCK_ABSTRACT_ORIGIN of the blocks. */
568 static int
569 compare_blocks (v1, v2)
570 const PTR v1;
571 const PTR v2;
573 tree b1 = *((const tree *) v1);
574 tree b2 = *((const tree *) v2);
576 return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
577 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
580 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
581 an original block; the second to a remapped equivalent. */
583 static int
584 find_block (v1, v2)
585 const PTR v1;
586 const PTR v2;
588 const union tree_node *b1 = (const union tree_node *) v1;
589 tree b2 = *((const tree *) v2);
591 return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
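/* Added note: compare_blocks and find_block use the same key, the address of
   BLOCK_ABSTRACT_ORIGIN, so that map->block_map can be qsort'ed once in
   expand_inline_function and then bsearch'ed in copy_insn_list when remapping
   NOTE_INSN_BLOCK_BEG/END notes.  */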
594 /* Integrate the procedure defined by FNDECL. Note that this function
595 may wind up calling itself. Since the static variables are not
596 reentrant, we do not assign them until after the possibility
597 of recursion is eliminated.
599 If IGNORE is nonzero, do not produce a value.
600 Otherwise store the value in TARGET if it is nonzero and that is convenient.
602 Value is:
603 (rtx)-1 if we could not substitute the function
604 0 if we substituted it and it does not produce a value
605 else an rtx for where the value is stored. */
608 expand_inline_function (fndecl, parms, target, ignore, type,
609 structure_value_addr)
610 tree fndecl, parms;
611 rtx target;
612 int ignore;
613 tree type;
614 rtx structure_value_addr;
616 struct function *inlining_previous;
617 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
618 tree formal, actual, block;
619 rtx parm_insns = inl_f->emit->x_first_insn;
620 rtx insns = (inl_f->inl_last_parm_insn
621 ? NEXT_INSN (inl_f->inl_last_parm_insn)
622 : parm_insns);
623 tree *arg_trees;
624 rtx *arg_vals;
625 int max_regno;
626 register int i;
627 int min_labelno = inl_f->emit->x_first_label_num;
628 int max_labelno = inl_f->inl_max_label_num;
629 int nargs;
630 rtx loc;
631 rtx stack_save = 0;
632 rtx temp;
633 struct inline_remap *map = 0;
634 #ifdef HAVE_cc0
635 rtx cc0_insn = 0;
636 #endif
637 rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
638 rtx static_chain_value = 0;
639 int inl_max_uid;
641 /* The pointer used to track the true location of the memory used
642 for MAP->LABEL_MAP. */
643 rtx *real_label_map = 0;
645 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
646 max_regno = inl_f->emit->x_reg_rtx_no + 3;
647 if (max_regno < FIRST_PSEUDO_REGISTER)
648 abort ();
650 /* Pull out the decl for the function definition; fndecl may be a
651 local declaration, which would break DECL_ABSTRACT_ORIGIN. */
652 fndecl = inl_f->decl;
654 nargs = list_length (DECL_ARGUMENTS (fndecl));
656 if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
657 cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
659 /* Check that the parm types match and that sufficient arguments were
660 passed. Since the appropriate conversions or default promotions have
661 already been applied, the machine modes should match exactly. */
663 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
664 formal;
665 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
667 tree arg;
668 enum machine_mode mode;
670 if (actual == 0)
671 return (rtx) (HOST_WIDE_INT) -1;
673 arg = TREE_VALUE (actual);
674 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
676 if (mode != TYPE_MODE (TREE_TYPE (arg))
677 /* If they are block mode, the types should match exactly.
678 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
679 which could happen if the parameter has incomplete type. */
680 || (mode == BLKmode
681 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
682 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
683 return (rtx) (HOST_WIDE_INT) -1;
686 /* Extra arguments are valid, but will be ignored below, so we must
687 evaluate them here for side-effects. */
688 for (; actual; actual = TREE_CHAIN (actual))
689 expand_expr (TREE_VALUE (actual), const0_rtx,
690 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
692 /* Expand the function arguments. Do this first so that any
693 new registers get created before we allocate the maps. */
695 arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
696 arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
698 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
699 formal;
700 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
702 /* Actual parameter, converted to the type of the argument within the
703 function. */
704 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
705 /* Mode of the variable used within the function. */
706 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
707 int invisiref = 0;
709 arg_trees[i] = arg;
710 loc = RTVEC_ELT (arg_vector, i);
712 /* If this is an object passed by invisible reference, we copy the
713 object into a stack slot and save its address. If this will go
714 into memory, we do nothing now. Otherwise, we just expand the
715 argument. */
716 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
717 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
719 rtx stack_slot
720 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
721 int_size_in_bytes (TREE_TYPE (arg)), 1);
722 MEM_SET_IN_STRUCT_P (stack_slot,
723 AGGREGATE_TYPE_P (TREE_TYPE (arg)));
725 store_expr (arg, stack_slot, 0);
727 arg_vals[i] = XEXP (stack_slot, 0);
728 invisiref = 1;
730 else if (GET_CODE (loc) != MEM)
732 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
733 /* The mode of LOC and ARG can differ if LOC was a variable
734 that had its mode promoted via PROMOTED_MODE. */
735 arg_vals[i] = convert_modes (GET_MODE (loc),
736 TYPE_MODE (TREE_TYPE (arg)),
737 expand_expr (arg, NULL_RTX, mode,
738 EXPAND_SUM),
739 TREE_UNSIGNED (TREE_TYPE (formal)));
740 else
741 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
743 else
744 arg_vals[i] = 0;
746 if (arg_vals[i] != 0
747 && (! TREE_READONLY (formal)
748 /* If the parameter is not read-only, copy our argument through
749 a register. Also, we cannot use ARG_VALS[I] if it overlaps
750 TARGET in any way. In the inline function, they will likely
751 be two different pseudos, and `safe_from_p' will make all
752 sorts of smart assumptions about their not conflicting.
753 But if ARG_VALS[I] overlaps TARGET, these assumptions are
754 wrong, so put ARG_VALS[I] into a fresh register.
755 Don't worry about invisible references, since their stack
756 temps will never overlap the target. */
757 || (target != 0
758 && ! invisiref
759 && (GET_CODE (arg_vals[i]) == REG
760 || GET_CODE (arg_vals[i]) == SUBREG
761 || GET_CODE (arg_vals[i]) == MEM)
762 && reg_overlap_mentioned_p (arg_vals[i], target))
763 /* ??? We must always copy a SUBREG into a REG, because it might
764 get substituted into an address, and not all ports correctly
765 handle SUBREGs in addresses. */
766 || (GET_CODE (arg_vals[i]) == SUBREG)))
767 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
769 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
770 && POINTER_TYPE_P (TREE_TYPE (formal)))
771 mark_reg_pointer (arg_vals[i],
772 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
775 /* Allocate the structures we use to remap things. */
777 map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
778 map->fndecl = fndecl;
780 VARRAY_TREE_INIT (map->block_map, 10, "block_map");
781 map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
783 /* We used to use alloca here, but the size of what it would try to
784 allocate would occasionally cause it to exceed the stack limit and
785 cause unpredictable core dumps. */
786 real_label_map
787 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
788 map->label_map = real_label_map;
790 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
791 map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
792 map->min_insnno = 0;
793 map->max_insnno = inl_max_uid;
795 map->integrating = 1;
797 /* const_equiv_varray maps pseudos in our routine to constants, so
798 it needs to be large enough for all our pseudos. This is the
799 number we are currently using plus the number in the called
800 routine, plus 15 for each arg, five to compute the virtual frame
801 pointer, and five for the return value. This should be enough
802 for most cases. We do not reference entries outside the range of
803 the map.
805 ??? These numbers are quite arbitrary and were obtained by
806 experimentation. At some point, we should try to allocate the
807 table after all the parameters are set up so we can more accurately
808 estimate the number of pseudos we will need. */
810 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
811 (max_reg_num ()
812 + (max_regno - FIRST_PSEUDO_REGISTER)
813 + 15 * nargs
814 + 10),
815 "expand_inline_function");
816 map->const_age = 0;
818 /* Record the current insn in case we have to set up pointers to frame
819 and argument memory blocks. If there are no insns yet, add a dummy
820 insn that can be used as an insertion point. */
821 map->insns_at_start = get_last_insn ();
822 if (map->insns_at_start == 0)
823 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
825 map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
826 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
828 /* Update the outgoing argument size to allow for those in the inlined
829 function. */
830 if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
831 current_function_outgoing_args_size = inl_f->outgoing_args_size;
833 /* If the inline function needs to make PIC references, that means
834 that this function's PIC offset table must be used. */
835 if (inl_f->uses_pic_offset_table)
836 current_function_uses_pic_offset_table = 1;
838 /* If this function needs a context, set it up. */
839 if (inl_f->needs_context)
840 static_chain_value = lookup_static_chain (fndecl);
842 if (GET_CODE (parm_insns) == NOTE
843 && NOTE_LINE_NUMBER (parm_insns) > 0)
845 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
846 NOTE_LINE_NUMBER (parm_insns));
847 if (note)
848 RTX_INTEGRATED_P (note) = 1;
851 /* Process each argument. For each, set up things so that the function's
852 reference to the argument will refer to the argument being passed.
853 We only replace REG with REG here. Any simplifications are done
854 via const_equiv_map.
856 We make two passes: In the first, we deal with parameters that will
857 be placed into registers, since we need to ensure that the allocated
858 register number fits in const_equiv_map. Then we store all non-register
859 parameters into their memory location. */
861 /* Don't try to free temp stack slots here, because we may put one of the
862 parameters into a temp stack slot. */
864 for (i = 0; i < nargs; i++)
866 rtx copy = arg_vals[i];
868 loc = RTVEC_ELT (arg_vector, i);
870 /* There are three cases, each handled separately. */
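/* Added summary of the three cases handled below: (1) a MEM whose address is
   a register above the virtual registers -- an object passed by invisible
   reference; (2) any other MEM -- a parameter living in memory, stored later
   by store_expr; (3) a REG or CONCAT -- a parameter living in one register or
   in a real/imaginary register pair.  */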
871 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
872 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
874 /* This must be an object passed by invisible reference (it could
875 also be a variable-sized object, but we forbid inlining functions
876 with variable-sized arguments). COPY is the address of the
877 actual value (this computation will cause it to be copied). We
878 map that address for the register, noting the actual address as
879 an equivalent in case it can be substituted into the insns. */
881 if (GET_CODE (copy) != REG)
883 temp = copy_addr_to_reg (copy);
884 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
885 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
886 copy = temp;
888 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
890 else if (GET_CODE (loc) == MEM)
892 /* This is the case of a parameter that lives in memory. It
893 will live in the block we allocate in the called routine's
894 frame that simulates the incoming argument area. Do nothing
895 with the parameter now; we will call store_expr later. In
896 this case, however, we must ensure that the virtual stack and
897 incoming arg rtx values are expanded now so that we can be
898 sure we have enough slots in the const equiv map since the
899 store_expr call can easily blow the size estimate. */
900 if (DECL_FRAME_SIZE (fndecl) != 0)
901 copy_rtx_and_substitute (virtual_stack_vars_rtx, map, 0);
903 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
904 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
906 else if (GET_CODE (loc) == REG)
907 process_reg_param (map, loc, copy);
908 else if (GET_CODE (loc) == CONCAT)
910 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
911 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
912 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
913 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
915 process_reg_param (map, locreal, copyreal);
916 process_reg_param (map, locimag, copyimag);
918 else
919 abort ();
922 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
923 specially. This function can be called recursively, so we need to
924 save the previous value. */
925 inlining_previous = inlining;
926 inlining = inl_f;
928 /* Now do the parameters that will be placed in memory. */
930 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
931 formal; formal = TREE_CHAIN (formal), i++)
933 loc = RTVEC_ELT (arg_vector, i);
935 if (GET_CODE (loc) == MEM
936 /* Exclude case handled above. */
937 && ! (GET_CODE (XEXP (loc, 0)) == REG
938 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
940 rtx note = emit_note (DECL_SOURCE_FILE (formal),
941 DECL_SOURCE_LINE (formal));
942 if (note)
943 RTX_INTEGRATED_P (note) = 1;
945 /* Compute the address in the area we reserved and store the
946 value there. */
947 temp = copy_rtx_and_substitute (loc, map, 1);
948 subst_constants (&temp, NULL_RTX, map, 1);
949 apply_change_group ();
950 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
951 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
952 store_expr (arg_trees[i], temp, 0);
956 /* Deal with the places that the function puts its result.
957 We are driven by what is placed into DECL_RESULT.
959 Initially, we assume that we don't have any special handling for
960 REG_FUNCTION_VALUE_P. */
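/* Added summary: the cases below are (1) a VOIDmode result type -- nothing to
   do; (2) DECL_RESULT in a MEM -- an aggregate returned through
   STRUCTURE_VALUE_ADDR (or an ADDRESSOF slot); (3) the value is ignored;
   (4) DECL_RESULT in a REG -- remap the return register, via inline_target or
   reg_map, to a register of the right mode.  */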
962 map->inline_target = 0;
963 loc = DECL_RTL (DECL_RESULT (fndecl));
965 if (TYPE_MODE (type) == VOIDmode)
966 /* There is no return value to worry about. */
968 else if (GET_CODE (loc) == MEM)
970 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
972 temp = copy_rtx_and_substitute (loc, map, 1);
973 subst_constants (&temp, NULL_RTX, map, 1);
974 apply_change_group ();
975 target = temp;
977 else
979 if (! structure_value_addr
980 || ! aggregate_value_p (DECL_RESULT (fndecl)))
981 abort ();
983 /* Pass the function the address in which to return a structure
984 value. Note that a constructor can cause someone to call us
985 with STRUCTURE_VALUE_ADDR, but the initialization takes place
986 via the first parameter, rather than the struct return address.
988 We have two cases: If the address is a simple register
989 indirect, use the mapping mechanism to point that register to
990 our structure return address. Otherwise, store the structure
991 return value into the place that it will be referenced from. */
993 if (GET_CODE (XEXP (loc, 0)) == REG)
995 temp = force_operand (structure_value_addr, NULL_RTX);
996 temp = force_reg (Pmode, temp);
997 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
999 if (CONSTANT_P (structure_value_addr)
1000 || GET_CODE (structure_value_addr) == ADDRESSOF
1001 || (GET_CODE (structure_value_addr) == PLUS
1002 && (XEXP (structure_value_addr, 0)
1003 == virtual_stack_vars_rtx)
1004 && (GET_CODE (XEXP (structure_value_addr, 1))
1005 == CONST_INT)))
1007 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
1008 CONST_AGE_PARM);
1011 else
1013 temp = copy_rtx_and_substitute (loc, map, 1);
1014 subst_constants (&temp, NULL_RTX, map, 0);
1015 apply_change_group ();
1016 emit_move_insn (temp, structure_value_addr);
1020 else if (ignore)
1021 /* We will ignore the result value, so don't look at its structure.
1022 Note that preparations for an aggregate return value
1023 do need to be made (above) even if it will be ignored. */
1025 else if (GET_CODE (loc) == REG)
1027 /* The function returns an object in a register and we use the return
1028 value. Set up our target for remapping. */
1031 /* Machine mode the function was declared to return. */
1031 enum machine_mode departing_mode = TYPE_MODE (type);
1032 /* (Possibly wider) machine mode it actually computes
1033 (for the sake of callers that fail to declare it right).
1034 We have to use the mode of the result's RTL, rather than
1035 its type, since expand_function_start may have promoted it. */
1036 enum machine_mode arriving_mode
1037 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1038 rtx reg_to_map;
1040 /* Don't use MEMs as direct targets because on some machines
1041 substituting a MEM for a REG makes invalid insns.
1042 Let the combiner substitute the MEM if that is valid. */
1043 if (target == 0 || GET_CODE (target) != REG
1044 || GET_MODE (target) != departing_mode)
1046 /* Don't make BLKmode registers. If this looks like
1047 a BLKmode object being returned in a register, get
1048 the mode from that, otherwise abort. */
1049 if (departing_mode == BLKmode)
1051 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1053 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1054 arriving_mode = departing_mode;
1056 else
1057 abort ();
1060 target = gen_reg_rtx (departing_mode);
1063 /* If function's value was promoted before return,
1064 avoid machine mode mismatch when we substitute INLINE_TARGET.
1065 But TARGET is what we will return to the caller. */
1066 if (arriving_mode != departing_mode)
1068 /* Avoid creating a paradoxical subreg wider than
1069 BITS_PER_WORD, since that is illegal. */
1070 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1072 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1073 GET_MODE_BITSIZE (arriving_mode)))
1074 /* Maybe could be handled by using convert_move () ? */
1075 abort ();
1076 reg_to_map = gen_reg_rtx (arriving_mode);
1077 target = gen_lowpart (departing_mode, reg_to_map);
1079 else
1080 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1082 else
1083 reg_to_map = target;
1085 /* Usually, the result value is the machine's return register.
1086 Sometimes it may be a pseudo. Handle both cases. */
1087 if (REG_FUNCTION_VALUE_P (loc))
1088 map->inline_target = reg_to_map;
1089 else
1090 map->reg_map[REGNO (loc)] = reg_to_map;
1092 else
1093 abort ();
1095 /* Initialize label_map. get_label_from_map will actually make
1096 the labels. */
1097 memset ((char *) &map->label_map[min_labelno], 0,
1098 (max_labelno - min_labelno) * sizeof (rtx));
1100 /* Make copies of the decls of the symbols in the inline function, so that
1101 the copies of the variables get declared in the current function. Set
1102 up things so that lookup_static_chain knows to interpret registers
1103 in SAVE_EXPRs for TYPE_SIZEs as local. */
1104 inline_function_decl = fndecl;
1105 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1106 block = integrate_decl_tree (inl_f->original_decl_initial, map);
1107 BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
1108 inline_function_decl = 0;
1110 /* Make a fresh binding contour that we can easily remove. Do this after
1111 expanding our arguments so cleanups are properly scoped. */
1112 expand_start_bindings_and_block (0, block);
1114 /* Sort the block-map so that it will be easy to find remapped
1115 blocks later. */
1116 qsort (&VARRAY_TREE (map->block_map, 0),
1117 map->block_map->elements_used,
1118 sizeof (tree),
1119 compare_blocks);
1121 /* Perform postincrements before actually calling the function. */
1122 emit_queue ();
1124 /* Clean up stack so that variables might have smaller offsets. */
1125 do_pending_stack_adjust ();
1127 /* Save a copy of the location of const_equiv_varray for
1128 mark_stores, called via note_stores. */
1129 global_const_equiv_varray = map->const_equiv_varray;
1131 /* If the called function does an alloca, save and restore the
1132 stack pointer around the call. This saves stack space, but
1133 also is required if this inline is being done between two
1134 pushes. */
1135 if (inl_f->calls_alloca)
1136 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1138 /* Now copy the insns one by one. */
1139 copy_insn_list (insns, map, static_chain_value);
1141 /* Restore the stack pointer if we saved it above. */
1142 if (inl_f->calls_alloca)
1143 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1145 if (! cfun->x_whole_function_mode_p)
1146 /* In statement-at-a-time mode, we just tell the front-end to add
1147 this block to the list of blocks at this binding level. We
1148 can't do it the way it's done for function-at-a-time mode because the
1149 superblocks have not been created yet. */
1150 insert_block (block);
1151 else
1153 BLOCK_CHAIN (block)
1154 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1155 BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
1158 /* End the scope containing the copied formal parameter variables
1159 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1160 here so that expand_end_bindings will not check for unused
1161 variables. That's already been checked for when the inlined
1162 function was defined. */
1163 expand_end_bindings (NULL_TREE, 1, 1);
1165 /* Must mark the line number note after inlined functions as a repeat, so
1166 that the test coverage code can avoid counting the call twice. This
1167 just tells the code to ignore the immediately following line note, since
1168 there already exists a copy of this note before the expanded inline call.
1169 This line number note is still needed for debugging though, so we can't
1170 delete it. */
1171 if (flag_test_coverage)
1172 emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);
1174 emit_line_note (input_filename, lineno);
1176 /* If the function returns a BLKmode object in a register, copy it
1177 out of the temp register into a BLKmode memory object. */
1178 if (target
1179 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1180 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
1181 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
1183 if (structure_value_addr)
1185 target = gen_rtx_MEM (TYPE_MODE (type),
1186 memory_address (TYPE_MODE (type),
1187 structure_value_addr));
1188 set_mem_attributes (target, type, 1);
1191 /* Make sure we free the things we explicitly allocated with xmalloc. */
1192 if (real_label_map)
1193 free (real_label_map);
1194 VARRAY_FREE (map->const_equiv_varray);
1195 free (map->reg_map);
1196 VARRAY_FREE (map->block_map);
1197 free (map->insn_map);
1198 free (map);
1199 free (arg_vals);
1200 free (arg_trees);
1202 inlining = inlining_previous;
1204 return target;
1207 /* Make copies of each insn in the given list using the mapping
1208 computed in expand_inline_function. This function may call itself for
1209 insns containing sequences.
1211 Copying is done in two passes, first the insns and then their REG_NOTES.
1213 If static_chain_value is non-zero, it represents the context-pointer
1214 register for the function. */
1216 static void
1217 copy_insn_list (insns, map, static_chain_value)
1218 rtx insns;
1219 struct inline_remap *map;
1220 rtx static_chain_value;
1222 register int i;
1223 rtx insn;
1224 rtx temp;
1225 rtx local_return_label = NULL_RTX;
1226 #ifdef HAVE_cc0
1227 rtx cc0_insn = 0;
1228 #endif
1230 /* Copy the insns one by one. Do this in two passes, first the insns and
1231 then their REG_NOTES. */
1233 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1235 for (insn = insns; insn; insn = NEXT_INSN (insn))
1237 rtx copy, pattern, set;
1239 map->orig_asm_operands_vector = 0;
1241 switch (GET_CODE (insn))
1243 case INSN:
1244 pattern = PATTERN (insn);
1245 set = single_set (insn);
1246 copy = 0;
1247 if (GET_CODE (pattern) == USE
1248 && GET_CODE (XEXP (pattern, 0)) == REG
1249 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1250 /* The (USE (REG n)) at return from the function should
1251 be ignored since we are changing (REG n) into
1252 inline_target. */
1253 break;
1255 /* If the inline fn needs eh context, make sure that
1256 the current fn has one. */
1257 if (GET_CODE (pattern) == USE
1258 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1259 get_eh_context ();
1261 /* Ignore setting a function value that we don't want to use. */
1262 if (map->inline_target == 0
1263 && set != 0
1264 && GET_CODE (SET_DEST (set)) == REG
1265 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1267 if (volatile_refs_p (SET_SRC (set)))
1269 rtx new_set;
1271 /* If we must not delete the source,
1272 load it into a new temporary. */
1273 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1275 new_set = single_set (copy);
1276 if (new_set == 0)
1277 abort ();
1279 SET_DEST (new_set)
1280 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1282 /* If the source and destination are the same and it
1283 has a note on it, keep the insn. */
1284 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1285 && REG_NOTES (insn) != 0)
1286 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1287 else
1288 break;
1291 /* Similarly if an ignored return value is clobbered. */
1292 else if (map->inline_target == 0
1293 && GET_CODE (pattern) == CLOBBER
1294 && GET_CODE (XEXP (pattern, 0)) == REG
1295 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1296 break;
1298 /* If this is setting the static chain rtx, omit it. */
1299 else if (static_chain_value != 0
1300 && set != 0
1301 && GET_CODE (SET_DEST (set)) == REG
1302 && rtx_equal_p (SET_DEST (set),
1303 static_chain_incoming_rtx))
1304 break;
1306 /* If this is setting the static chain pseudo, set it from
1307 the value we want to give it instead. */
1308 else if (static_chain_value != 0
1309 && set != 0
1310 && rtx_equal_p (SET_SRC (set),
1311 static_chain_incoming_rtx))
1313 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
1315 copy = emit_move_insn (newdest, static_chain_value);
1316 static_chain_value = 0;
1319 /* If this is setting the virtual stack vars register, this must
1320 be the code at the handler for a builtin longjmp. The value
1321 saved in the setjmp buffer will be the address of the frame
1322 we've made for this inlined instance within our frame. But we
1323 know the offset of that value so we can use it to reconstruct
1324 our virtual stack vars register from that value. If we are
1325 copying it from the stack pointer, leave it unchanged. */
1326 else if (set != 0
1327 && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
1329 HOST_WIDE_INT offset;
1330 temp = map->reg_map[REGNO (SET_DEST (set))];
1331 temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1332 REGNO (temp)).rtx;
1334 if (rtx_equal_p (temp, virtual_stack_vars_rtx))
1335 offset = 0;
1336 else if (GET_CODE (temp) == PLUS
1337 && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
1338 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1339 offset = INTVAL (XEXP (temp, 1));
1340 else
1341 abort ();
1343 if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
1344 temp = SET_SRC (set);
1345 else
1346 temp = force_operand (plus_constant (SET_SRC (set),
1347 - offset),
1348 NULL_RTX);
1350 copy = emit_move_insn (virtual_stack_vars_rtx, temp);
1353 else
1354 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1355 /* REG_NOTES will be copied later. */
1357 #ifdef HAVE_cc0
1358 /* If this insn is setting CC0, it may need to look at
1359 the insn that uses CC0 to see what type of insn it is.
1360 In that case, the call to recog via validate_change will
1361 fail. So don't substitute constants here. Instead,
1362 do it when we emit the following insn.
1364 For example, see the pyr.md file. That machine has signed and
1365 unsigned compares. The compare patterns must check the
1366 following branch insn to see what kind of compare to
1367 emit.
1369 If the previous insn set CC0, substitute constants on it as
1370 well. */
1371 if (sets_cc0_p (PATTERN (copy)) != 0)
1372 cc0_insn = copy;
1373 else
1375 if (cc0_insn)
1376 try_constants (cc0_insn, map);
1377 cc0_insn = 0;
1378 try_constants (copy, map);
1380 #else
1381 try_constants (copy, map);
1382 #endif
1383 break;
1385 case JUMP_INSN:
1386 if (GET_CODE (PATTERN (insn)) == RETURN
1387 || (GET_CODE (PATTERN (insn)) == PARALLEL
1388 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1390 if (local_return_label == 0)
1391 local_return_label = gen_label_rtx ();
1392 pattern = gen_jump (local_return_label);
1394 else
1395 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1397 copy = emit_jump_insn (pattern);
1399 #ifdef HAVE_cc0
1400 if (cc0_insn)
1401 try_constants (cc0_insn, map);
1402 cc0_insn = 0;
1403 #endif
1404 try_constants (copy, map);
1406 /* If this used to be a conditional jump insn whose branch
1407 direction is now known, we must do something special. */
1408 if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
1410 #ifdef HAVE_cc0
1411 /* If the previous insn set cc0 for us, delete it. */
1412 if (sets_cc0_p (PREV_INSN (copy)))
1413 delete_insn (PREV_INSN (copy));
1414 #endif
1416 /* If this is now a no-op, delete it. */
1417 if (map->last_pc_value == pc_rtx)
1419 delete_insn (copy);
1420 copy = 0;
1422 else
1423 /* Otherwise, this is an unconditional jump so we must put a
1424 BARRIER after it. We could do some dead code elimination
1425 here, but jump.c will do it just as well. */
1426 emit_barrier ();
1428 break;
1430 case CALL_INSN:
1431 /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1432 three attached sequences: normal call, sibling call and tail
1433 recursion. */
1434 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1436 rtx sequence[3];
1437 rtx tail_label;
1439 for (i = 0; i < 3; i++)
1441 rtx seq;
1443 sequence[i] = NULL_RTX;
1444 seq = XEXP (PATTERN (insn), i);
1445 if (seq)
1447 start_sequence ();
1448 copy_insn_list (seq, map, static_chain_value);
1449 sequence[i] = get_insns ();
1450 end_sequence ();
1454 /* Find the new tail recursion label.
1455 It will already be substituted into sequence[2]. */
1456 tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
1457 map, 0);
1459 copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
1460 sequence[0],
1461 sequence[1],
1462 sequence[2],
1463 tail_label));
1464 break;
1467 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1468 copy = emit_call_insn (pattern);
1470 SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
1471 CONST_CALL_P (copy) = CONST_CALL_P (insn);
1473 /* Because the USAGE information potentially contains objects other
1474 than hard registers, we need to copy it. */
1476 CALL_INSN_FUNCTION_USAGE (copy)
1477 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1478 map, 0);
1480 #ifdef HAVE_cc0
1481 if (cc0_insn)
1482 try_constants (cc0_insn, map);
1483 cc0_insn = 0;
1484 #endif
1485 try_constants (copy, map);
1487 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1488 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1489 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
1490 break;
1492 case CODE_LABEL:
1493 copy = emit_label (get_label_from_map (map,
1494 CODE_LABEL_NUMBER (insn)));
1495 LABEL_NAME (copy) = LABEL_NAME (insn);
1496 map->const_age++;
1497 break;
1499 case BARRIER:
1500 copy = emit_barrier ();
1501 break;
1503 case NOTE:
1504 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1505 discarded because it is important to have only one of
1506 each in the current function.
1508 NOTE_INSN_DELETED notes aren't useful.
1510 NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
1511 pointer (which will soon be dangling) confuses flow's
1512 attempts to preserve bb structures during the compilation
1513 of a function. */
1515 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1516 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1517 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
1518 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
1520 copy = emit_note (NOTE_SOURCE_FILE (insn),
1521 NOTE_LINE_NUMBER (insn));
1522 if (copy
1523 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
1524 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
1526 rtx label
1527 = get_label_from_map (map, NOTE_EH_HANDLER (copy));
1529 /* We have to duplicate the handlers for the original. */
1530 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
1532 /* We need to duplicate the handlers for the EH region
1533 and we need to indicate where the label map is. */
1534 eif_eh_map = map;
1535 duplicate_eh_handlers (NOTE_EH_HANDLER (copy),
1536 CODE_LABEL_NUMBER (label),
1537 expand_inline_function_eh_labelmap);
1540 /* We have to forward these both to match the new exception
1541 region. */
1542 NOTE_EH_HANDLER (copy) = CODE_LABEL_NUMBER (label);
1544 else if (copy
1545 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1546 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1547 && NOTE_BLOCK (insn))
1549 tree *mapped_block_p;
1551 mapped_block_p
1552 = (tree *) bsearch (NOTE_BLOCK (insn),
1553 &VARRAY_TREE (map->block_map, 0),
1554 map->block_map->elements_used,
1555 sizeof (tree),
1556 find_block);
1558 if (!mapped_block_p)
1559 abort ();
1560 else
1561 NOTE_BLOCK (copy) = *mapped_block_p;
1564 else
1565 copy = 0;
1566 break;
1568 default:
1569 abort ();
1572 if (copy)
1573 RTX_INTEGRATED_P (copy) = 1;
1575 map->insn_map[INSN_UID (insn)] = copy;
1578 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1579 from parameters can be substituted in. These are the only ones that
1580 are valid across the entire function. */
1581 map->const_age++;
1582 for (insn = insns; insn; insn = NEXT_INSN (insn))
1583 if (INSN_P (insn)
1584 && map->insn_map[INSN_UID (insn)]
1585 && REG_NOTES (insn))
1587 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1589 /* We must also do subst_constants, in case one of our parameters
1590 has const type and constant value. */
1591 subst_constants (&note, NULL_RTX, map, 0);
1592 apply_change_group ();
1593 REG_NOTES (map->insn_map[INSN_UID (insn)]) = note;
1595 /* Finally, delete any REG_LABEL notes from the chain. */
1596 for (; note; note = next)
1598 next = XEXP (note, 1);
1599 if (REG_NOTE_KIND (note) == REG_LABEL)
1600 remove_note (map->insn_map[INSN_UID (insn)], note);
1604 if (local_return_label)
1605 emit_label (local_return_label);
1608 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1609 push all of those decls and give each one the corresponding home. */
1611 static void
1612 integrate_parm_decls (args, map, arg_vector)
1613 tree args;
1614 struct inline_remap *map;
1615 rtvec arg_vector;
1617 register tree tail;
1618 register int i;
1620 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1622 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1623 current_function_decl);
1624 rtx new_decl_rtl
1625 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1627 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1628 here, but that's going to require some more work. */
1629 /* DECL_INCOMING_RTL (decl) = ?; */
1630 /* Fully instantiate the address with the equivalent form so that the
1631 debugging information contains the actual register, instead of the
1632 virtual register. Do this by not passing an insn to
1633 subst_constants. */
1634 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1635 apply_change_group ();
1636 DECL_RTL (decl) = new_decl_rtl;
1640 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1641 current function a tree of contexts isomorphic to the one that is given.
1643 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1644 registers used in the DECL_RTL field should be remapped. If it is zero,
1645 no mapping is necessary. */
1647 static tree
1648 integrate_decl_tree (let, map)
1649 tree let;
1650 struct inline_remap *map;
1652 tree t;
1653 tree new_block;
1654 tree *next;
1656 new_block = make_node (BLOCK);
1657 VARRAY_PUSH_TREE (map->block_map, new_block);
1658 next = &BLOCK_VARS (new_block);
1660 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1662 tree d;
1664 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1666 if (DECL_RTL (t) != 0)
1668 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map, 1);
1670 /* Fully instantiate the address with the equivalent form so that the
1671 debugging information contains the actual register, instead of the
1672 virtual register. Do this by not passing an insn to
1673 subst_constants. */
1674 subst_constants (&DECL_RTL (d), NULL_RTX, map, 1);
1675 apply_change_group ();
1678 /* Add this declaration to the list of variables in the new
1679 block. */
1680 *next = d;
1681 next = &TREE_CHAIN (d);
1684 next = &BLOCK_SUBBLOCKS (new_block);
1685 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1687 *next = integrate_decl_tree (t, map);
1688 BLOCK_SUPERCONTEXT (*next) = new_block;
1689 next = &BLOCK_CHAIN (*next);
1692 TREE_USED (new_block) = TREE_USED (let);
1693 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1695 return new_block;
1698 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1699 except for those few rtx codes that are sharable.
1701 We always return an rtx that is similar to the incoming rtx, with the
1702 exception of possibly changing a REG to a SUBREG or vice versa. No
1703 rtl is ever emitted.
1705 If FOR_LHS is nonzero, it means we are processing something that will
1706 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1707 inlining since we need to be conservative in how it is set for
1708 such cases.
1710 Handle constants that need to be placed in the constant pool by
1711 calling `force_const_mem'. */
1713 rtx
1714 copy_rtx_and_substitute (orig, map, for_lhs)
1715 register rtx orig;
1716 struct inline_remap *map;
1717 int for_lhs;
1719 register rtx copy, temp;
1720 register int i, j;
1721 register RTX_CODE code;
1722 register enum machine_mode mode;
1723 register const char *format_ptr;
1724 int regno;
1726 if (orig == 0)
1727 return 0;
1729 code = GET_CODE (orig);
1730 mode = GET_MODE (orig);
1732 switch (code)
1734 case REG:
1735 /* If the stack pointer register shows up, it must be part of
1736 stack-adjustments (*not* because we eliminated the frame pointer!).
1737 Small hard registers are returned as-is. Pseudo-registers
1738 go through their `reg_map'. */
1739 regno = REGNO (orig);
1740 if (regno <= LAST_VIRTUAL_REGISTER
1741 || (map->integrating
1742 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1744 /* Some hard registers are also mapped,
1745 but others are not translated. */
1746 if (map->reg_map[regno] != 0
1747 /* We shouldn't usually have reg_map set for the return
1748 register, but it may happen if we have leaf-register
1749 remapping and the return register is used in one of
1750 the calling sequences of a call_placeholder. In this
1751 case, we'll end up with a reg_map set for this
1752 register, but we don't want to use it for registers
1753 marked as return values. */
1754 && ! REG_FUNCTION_VALUE_P (orig))
1755 return map->reg_map[regno];
1757 /* If this is the virtual frame pointer, make space in current
1758 function's stack frame for the stack frame of the inline function.
1760 Copy the address of this area into a pseudo. Map
1761 virtual_stack_vars_rtx to this pseudo and set up a constant
1762 equivalence for it to be the address. This will substitute the
1763 address into insns where it can be substituted and use the new
1764 pseudo where it can't. */
1765 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1767 rtx loc, seq;
1768 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1769 #ifdef FRAME_GROWS_DOWNWARD
1770 int alignment
1771 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1772 / BITS_PER_UNIT);
1774 /* In this case, virtual_stack_vars_rtx points to one byte
1775 higher than the top of the frame area. So make sure we
1776 allocate a big enough chunk to keep the frame pointer
1777 aligned like a real one. */
1778 if (alignment)
1779 size = CEIL_ROUND (size, alignment);
1780 #endif
1781 start_sequence ();
1782 loc = assign_stack_temp (BLKmode, size, 1);
1783 loc = XEXP (loc, 0);
1784 #ifdef FRAME_GROWS_DOWNWARD
1785 /* In this case, virtual_stack_vars_rtx points to one byte
1786 higher than the top of the frame area. So compute the offset
1787 to one byte higher than our substitute frame. */
1788 loc = plus_constant (loc, size);
1789 #endif
1790 map->reg_map[regno] = temp
1791 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1793 #ifdef STACK_BOUNDARY
1794 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1795 #endif
1797 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1799 seq = gen_sequence ();
1800 end_sequence ();
1801 emit_insn_after (seq, map->insns_at_start);
1802 return temp;
1804 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1805 || (map->integrating
1806 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1807 == orig)))
1809 /* Do the same for a block to contain any arguments referenced
1810 in memory. */
1811 rtx loc, seq;
1812 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1814 start_sequence ();
1815 loc = assign_stack_temp (BLKmode, size, 1);
1816 loc = XEXP (loc, 0);
1817 /* When arguments grow downward, the virtual incoming
1818 args pointer points to the top of the argument block,
1819 so the remapped location better do the same. */
1820 #ifdef ARGS_GROW_DOWNWARD
1821 loc = plus_constant (loc, size);
1822 #endif
1823 map->reg_map[regno] = temp
1824 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1826 #ifdef STACK_BOUNDARY
1827 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1828 #endif
1830 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1832 seq = gen_sequence ();
1833 end_sequence ();
1834 emit_insn_after (seq, map->insns_at_start);
1835 return temp;
1837 else if (REG_FUNCTION_VALUE_P (orig))
1839 /* This is a reference to the function return value. If
1840 the function doesn't have a return value, error. If the
1841 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
1842 if (map->inline_target == 0)
1843 /* Must be unrolling loops or replicating code if we
1844 reach here, so return the register unchanged. */
1845 return orig;
1846 else if (GET_MODE (map->inline_target) != BLKmode
1847 && mode != GET_MODE (map->inline_target))
1848 return gen_lowpart (mode, map->inline_target);
1849 else
1850 return map->inline_target;
1852 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
1853 /* If leaf_renumber_regs_insn() might remap this register to
1854 some other number, make sure we don't share it with the
1855 inlined function, otherwise delayed optimization of the
1856 inlined function may change it in place, breaking our
1857 reference to it. We may still share it within the
1858 function, so create an entry for this register in the
1859 reg_map. */
1860 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
1861 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
1863 temp = gen_rtx_REG (mode, regno);
1864 map->reg_map[regno] = temp;
1865 return temp;
1867 #endif
1868 else
1869 return orig;
1871 abort ();
1873 if (map->reg_map[regno] == NULL)
1875 map->reg_map[regno] = gen_reg_rtx (mode);
1876 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1877 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1878 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1879 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1881 if (map->regno_pointer_flag[regno])
1882 mark_reg_pointer (map->reg_map[regno],
1883 map->regno_pointer_align[regno]);
1885 return map->reg_map[regno];
1887 case SUBREG:
1888 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1889 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1890 if (GET_CODE (copy) == SUBREG)
1891 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
1892 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1893 else if (GET_CODE (copy) == CONCAT)
1895 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
1897 if (GET_MODE (retval) == GET_MODE (orig))
1898 return retval;
1899 else
1900 return gen_rtx_SUBREG (GET_MODE (orig), retval,
1901 (SUBREG_WORD (orig) %
1902 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
1903 / (unsigned) UNITS_PER_WORD)));
1905 else
1906 return gen_rtx_SUBREG (GET_MODE (orig), copy,
1907 SUBREG_WORD (orig));
1909 case ADDRESSOF:
1910 copy = gen_rtx_ADDRESSOF (mode,
1911 copy_rtx_and_substitute (XEXP (orig, 0),
1912 map, for_lhs),
1913 0, ADDRESSOF_DECL (orig));
1914 regno = ADDRESSOF_REGNO (orig);
1915 if (map->reg_map[regno])
1916 regno = REGNO (map->reg_map[regno]);
1917 else if (regno > LAST_VIRTUAL_REGISTER)
1919 temp = XEXP (orig, 0);
1920 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
1921 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
1922 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
1923 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
1924 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1926 if (map->regno_pointer_flag[regno])
1927 mark_reg_pointer (map->reg_map[regno],
1928 map->regno_pointer_align[regno]);
1929 regno = REGNO (map->reg_map[regno]);
1931 ADDRESSOF_REGNO (copy) = regno;
1932 return copy;
1934 case USE:
1935 case CLOBBER:
1936 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1937 to (use foo) if the original insn didn't have a subreg.
1938 Removing the subreg distorts the VAX movstrhi pattern
1939 by changing the mode of an operand. */
1940 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
1941 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1942 copy = SUBREG_REG (copy);
1943 return gen_rtx_fmt_e (code, VOIDmode, copy);
1945 case CODE_LABEL:
1946 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1947 = LABEL_PRESERVE_P (orig);
1948 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
1950 /* We need to handle "deleted" labels that appear in the DECL_RTL
1951 of a LABEL_DECL. */
1952 case NOTE:
1953 if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
1954 return map->insn_map[INSN_UID (orig)];
1955 break;
1957 case LABEL_REF:
1958 copy
1959 = gen_rtx_LABEL_REF
1960 (mode,
1961 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1962 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
1964 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1966 /* The fact that this label was previously nonlocal does not mean
1967 it still is, so we must check if it is within the range of
1968 this function's labels. */
1969 LABEL_REF_NONLOCAL_P (copy)
1970 = (LABEL_REF_NONLOCAL_P (orig)
1971 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
1972 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
1974 /* If we have made a nonlocal label local, it means that this
1975 inlined call will be referring to our nonlocal goto handler.
1976 So make sure we create one for this block; we normally would
1977 not since this is not otherwise considered a "call". */
1978 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
1979 function_call_count++;
1981 return copy;
1983 case PC:
1984 case CC0:
1985 case CONST_INT:
1986 return orig;
1988 case SYMBOL_REF:
1989 /* Symbols which represent the address of a label stored in the constant
1990 pool must be modified to point to a constant pool entry for the
1991 remapped label. Otherwise, symbols are returned unchanged. */
1992 if (CONSTANT_POOL_ADDRESS_P (orig))
1994 struct function *f = inlining ? inlining : cfun;
1995 rtx constant = get_pool_constant_for_function (f, orig);
1996 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
1997 if (inlining)
1999 rtx temp = force_const_mem (const_mode,
2000 copy_rtx_and_substitute (constant,
2001 map, 0));
2003 #if 0
2004 /* Legitimizing the address here is incorrect.
2006 Since we had a SYMBOL_REF before, we can assume it is valid
2007 to have one in this position in the insn.
2009 Also, change_address may create new registers. These
2010 registers will not have valid reg_map entries. This can
2011 cause try_constants() to fail because it assumes that all
2012 registers in the rtx have valid reg_map entries, and it may
2013 end up replacing one of these new registers with junk. */
2015 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2016 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2017 #endif
2019 temp = XEXP (temp, 0);
2021 #ifdef POINTERS_EXTEND_UNSIGNED
2022 if (GET_MODE (temp) != GET_MODE (orig))
2023 temp = convert_memory_address (GET_MODE (orig), temp);
2024 #endif
2025 return temp;
2027 else if (GET_CODE (constant) == LABEL_REF)
2028 return XEXP (force_const_mem
2029 (GET_MODE (orig),
2030 copy_rtx_and_substitute (constant, map, for_lhs)),
2031 0);
2033 else if (SYMBOL_REF_NEED_ADJUST (orig))
2035 eif_eh_map = map;
2036 return rethrow_symbol_map (orig,
2037 expand_inline_function_eh_labelmap);
2040 return orig;
2042 case CONST_DOUBLE:
2043 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2044 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2045 duplicate of a CONST_DOUBLE we have already seen. */
2046 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2048 REAL_VALUE_TYPE d;
2050 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2051 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2053 else
2054 return immed_double_const (CONST_DOUBLE_LOW (orig),
2055 CONST_DOUBLE_HIGH (orig), VOIDmode);
2057 case CONST:
2058 /* Make new constant pool entry for a constant
2059 that was in the pool of the inline function. */
2060 if (RTX_INTEGRATED_P (orig))
2061 abort ();
2062 break;
2064 case ASM_OPERANDS:
2065 /* If a single asm insn contains multiple output operands then
2066 it contains multiple ASM_OPERANDS rtx's that share the input
2067 and constraint vecs. We must make sure that the copied insn
2068 continues to share them. */
2069 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2071 copy = rtx_alloc (ASM_OPERANDS);
2072 copy->volatil = orig->volatil;
2073 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2074 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2075 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2076 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2077 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2078 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2079 = map->copy_asm_constraints_vector;
2080 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2081 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2082 return copy;
2084 break;
2086 case CALL:
2087 /* This is given special treatment because the first
2088 operand of a CALL is a (MEM ...) which may get
2089 forced into a register for cse. This is undesirable
2090 if function-address cse isn't wanted or if we won't do cse. */
2091 #ifndef NO_FUNCTION_CSE
2092 if (! (optimize && ! flag_no_function_cse))
2093 #endif
2094 return
2095 gen_rtx_CALL
2096 (GET_MODE (orig),
2097 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2098 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2099 map, 0)),
2100 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2101 break;
2103 #if 0
2104 /* Must be ifdefed out for loop unrolling to work. */
2105 case RETURN:
2106 abort ();
2107 #endif
2109 case SET:
2110 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2111 Adjust the setting by the offset of the area we made.
2112 If the nonlocal goto is into the current function,
2113 this will result in unnecessarily bad code, but should work. */
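/* EQUIV_LOC below is the location the virtual register was remapped to,
   either a bare register or (plus REG CONST_INT); LOC_OFFSET is that
   constant part, and the substituted source is adjusted by -LOC_OFFSET
   so the setting accounts for the offset of the area we made, as
   described above.  */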
2114 if (SET_DEST (orig) == virtual_stack_vars_rtx
2115 || SET_DEST (orig) == virtual_incoming_args_rtx)
2117 /* In case a translation hasn't occurred already, make one now. */
2118 rtx equiv_reg;
2119 rtx equiv_loc;
2120 HOST_WIDE_INT loc_offset;
2122 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2123 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2124 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2125 REGNO (equiv_reg)).rtx;
2126 loc_offset
2127 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2129 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2130 force_operand
2131 (plus_constant
2132 (copy_rtx_and_substitute (SET_SRC (orig),
2133 map, 0),
2134 - loc_offset),
2135 NULL_RTX));
2137 else
2138 return gen_rtx_SET (VOIDmode,
2139 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2140 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2141 break;
2143 case MEM:
2144 if (inlining
2145 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2146 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2148 enum machine_mode const_mode
2149 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2150 rtx constant
2151 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2153 constant = copy_rtx_and_substitute (constant, map, 0);
2155 /* If this was an address of a constant pool entry that itself
2156 had to be placed in the constant pool, it might not be a
2157 valid address. So the recursive call might have turned it
2158 into a register. In that case, it isn't a constant any
2159 more, so return it. This has the potential of changing a
2160 MEM into a REG, but we'll assume that it is safe. */
2161 if (! CONSTANT_P (constant))
2162 return constant;
2164 return validize_mem (force_const_mem (const_mode, constant));
2167 copy = rtx_alloc (MEM);
2168 PUT_MODE (copy, mode);
2169 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
2170 MEM_COPY_ATTRIBUTES (copy, orig);
2171 return copy;
2173 default:
2174 break;
2177 copy = rtx_alloc (code);
2178 PUT_MODE (copy, mode);
2179 copy->in_struct = orig->in_struct;
2180 copy->volatil = orig->volatil;
2181 copy->unchanging = orig->unchanging;
2183 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2185 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2187 switch (*format_ptr++)
2189 case '0':
2190 /* Copy this through the wide int field; that's safest. */
2191 X0WINT (copy, i) = X0WINT (orig, i);
2192 break;
2194 case 'e':
2195 XEXP (copy, i)
2196 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2197 break;
2199 case 'u':
2200 /* Change any references to old-insns to point to the
2201 corresponding copied insns. */
2202 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2203 break;
2205 case 'E':
2206 XVEC (copy, i) = XVEC (orig, i);
2207 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2209 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2210 for (j = 0; j < XVECLEN (copy, i); j++)
2211 XVECEXP (copy, i, j)
2212 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2213 map, for_lhs);
2215 break;
2217 case 'w':
2218 XWINT (copy, i) = XWINT (orig, i);
2219 break;
2221 case 'i':
2222 XINT (copy, i) = XINT (orig, i);
2223 break;
2225 case 's':
2226 XSTR (copy, i) = XSTR (orig, i);
2227 break;
2229 case 't':
2230 XTREE (copy, i) = XTREE (orig, i);
2231 break;
2233 default:
2234 abort ();
2238 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2240 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2241 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2242 map->copy_asm_constraints_vector
2243 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2246 return copy;
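/* A minimal sketch (not compiled; the surrounding insn-walk variables are
   assumed) of how the routine above is typically used when copying an
   inline body: copy each pattern through MAP, emit it, then let
   try_constants below clean up the result.  */
#if 0
      pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
      copy = emit_insn (pattern);
      try_constants (copy, map);
#endif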
2249 /* Substitute known constant values into INSN, if that is valid. */
2251 void
2252 try_constants (insn, map)
2253 rtx insn;
2254 struct inline_remap *map;
2256 int i;
2258 map->num_sets = 0;
2260 /* First try just updating addresses, then other things. This is
2261 important when we have something like the store of a constant
2262 into memory and we can update the memory address but the machine
2263 does not support a constant source. */
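/* For instance, a store of an inlined parameter that turned out to be
   constant into memory addressed off the virtual frame pointer: the
   first, address-only pass can rewrite the address to the caller's
   frame even when replacing the source register with the constant
   would produce an insn the machine cannot recognize.  */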
2264 subst_constants (&PATTERN (insn), insn, map, 1);
2265 apply_change_group ();
2266 subst_constants (&PATTERN (insn), insn, map, 0);
2267 apply_change_group ();
2269 /* Show we don't know the value of anything stored or clobbered. */
2270 note_stores (PATTERN (insn), mark_stores, NULL);
2271 map->last_pc_value = 0;
2272 #ifdef HAVE_cc0
2273 map->last_cc0_value = 0;
2274 #endif
2276 /* Set up any constant equivalences made in this insn. */
2277 for (i = 0; i < map->num_sets; i++)
2279 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2281 int regno = REGNO (map->equiv_sets[i].dest);
2283 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2284 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2285 /* The following clause is a hack to make the case work where GNU C++
2286 reassigns a variable to make cse work right. */
2287 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2288 regno).rtx,
2289 map->equiv_sets[i].equiv))
2290 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2291 map->equiv_sets[i].equiv, map->const_age);
2293 else if (map->equiv_sets[i].dest == pc_rtx)
2294 map->last_pc_value = map->equiv_sets[i].equiv;
2295 #ifdef HAVE_cc0
2296 else if (map->equiv_sets[i].dest == cc0_rtx)
2297 map->last_cc0_value = map->equiv_sets[i].equiv;
2298 #endif
2302 /* Substitute known constants for pseudo regs in the contents of LOC,
2303 which are part of INSN.
2304 If INSN is zero, the substitution should always be done (this is used to
2305 update DECL_RTL).
2306 These changes are taken out by try_constants if the result is not valid.
2308 Note that we are more concerned with determining when the result of a SET
2309 is a constant, for further propagation, than actually inserting constants
2310 into insns; cse will do the latter task better.
2312 This function is also used to adjust the addresses of items previously addressed
2313 via the virtual stack variable or virtual incoming arguments registers.
2315 If MEMONLY is nonzero, only make changes inside a MEM. */
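/* try_constants above uses MEMONLY for its first pass, so that only
   addresses inside MEMs are touched before the general pass runs.  */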
2317 static void
2318 subst_constants (loc, insn, map, memonly)
2319 rtx *loc;
2320 rtx insn;
2321 struct inline_remap *map;
2322 int memonly;
2324 rtx x = *loc;
2325 register int i, j;
2326 register enum rtx_code code;
2327 register const char *format_ptr;
2328 int num_changes = num_validated_changes ();
2329 rtx new = 0;
2330 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2332 code = GET_CODE (x);
2334 switch (code)
2336 case PC:
2337 case CONST_INT:
2338 case CONST_DOUBLE:
2339 case SYMBOL_REF:
2340 case CONST:
2341 case LABEL_REF:
2342 case ADDRESS:
2343 return;
2345 #ifdef HAVE_cc0
2346 case CC0:
2347 if (! memonly)
2348 validate_change (insn, loc, map->last_cc0_value, 1);
2349 return;
2350 #endif
2352 case USE:
2353 case CLOBBER:
2354 /* The only thing we can do with a USE or CLOBBER is possibly do
2355 some substitutions in a MEM within it. */
2356 if (GET_CODE (XEXP (x, 0)) == MEM)
2357 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2358 return;
2360 case REG:
2361 /* Substitute for parms and known constants. Don't replace
2362 hard regs used as user variables with constants. */
2363 if (! memonly)
2365 int regno = REGNO (x);
2366 struct const_equiv_data *p;
2368 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2369 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2370 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2371 p->rtx != 0)
2372 && p->age >= map->const_age)
2373 validate_change (insn, loc, p->rtx, 1);
2375 return;
2377 case SUBREG:
2378 /* SUBREG applied to something other than a reg
2379 should be treated as ordinary, since that must
2380 be a special hack and we don't know how to treat it specially.
2381 Consider for example mulsidi3 in m68k.md.
2382 Ordinary SUBREG of a REG needs this special treatment. */
2383 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2385 rtx inner = SUBREG_REG (x);
2386 rtx new = 0;
2388 /* We can't call subst_constants on &SUBREG_REG (x) because any
2389 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2390 see what is inside, try to form the new SUBREG and see if that is
2391 valid. We handle two cases: extracting a full word in an
2392 integral mode and extracting the low part. */
2393 subst_constants (&inner, NULL_RTX, map, 0);
2395 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2396 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2397 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2398 new = operand_subword (inner, SUBREG_WORD (x), 0,
2399 GET_MODE (SUBREG_REG (x)));
2401 cancel_changes (num_changes);
2402 if (new == 0 && subreg_lowpart_p (x))
2403 new = gen_lowpart_common (GET_MODE (x), inner);
2405 if (new)
2406 validate_change (insn, loc, new, 1);
2408 return;
2410 break;
2412 case MEM:
2413 subst_constants (&XEXP (x, 0), insn, map, 0);
2415 /* If a memory address got spoiled, change it back. */
2416 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2417 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2418 cancel_changes (num_changes);
2419 return;
2421 case SET:
2423 /* Substitute constants in our source, and in any arguments to a
2424 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2425 itself. */
2426 rtx *dest_loc = &SET_DEST (x);
2427 rtx dest = *dest_loc;
2428 rtx src, tem;
2430 subst_constants (&SET_SRC (x), insn, map, memonly);
2431 src = SET_SRC (x);
2433 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2434 || GET_CODE (*dest_loc) == SUBREG
2435 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2437 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2439 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2440 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2442 dest_loc = &XEXP (*dest_loc, 0);
2445 /* Do substitute in the address of a destination in memory. */
2446 if (GET_CODE (*dest_loc) == MEM)
2447 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2449 /* Check for the case where DEST is a SUBREG, both it and the underlying
2450 register are no wider than one word, and the SUBREG is at least as wide.
2451 In that case, we are really setting the underlying register to the
2452 source converted to the mode of DEST. So indicate that. */
2453 if (GET_CODE (dest) == SUBREG
2454 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2455 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2456 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2457 <= GET_MODE_SIZE (GET_MODE (dest)))
2458 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2459 src)))
2460 src = tem, dest = SUBREG_REG (dest);
2462 /* If storing a recognizable value save it for later recording. */
2463 if ((map->num_sets < MAX_RECOG_OPERANDS)
2464 && (CONSTANT_P (src)
2465 || (GET_CODE (src) == REG
2466 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2467 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2468 || (GET_CODE (src) == PLUS
2469 && GET_CODE (XEXP (src, 0)) == REG
2470 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2471 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2472 && CONSTANT_P (XEXP (src, 1)))
2473 || GET_CODE (src) == COMPARE
2474 #ifdef HAVE_cc0
2475 || dest == cc0_rtx
2476 #endif
2477 || (dest == pc_rtx
2478 && (src == pc_rtx || GET_CODE (src) == RETURN
2479 || GET_CODE (src) == LABEL_REF))))
2481 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2482 it will cause us to save the COMPARE with any constants
2483 substituted, which is what we want for later. */
2484 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2485 map->equiv_sets[map->num_sets++].dest = dest;
2488 return;
2490 default:
2491 break;
2494 format_ptr = GET_RTX_FORMAT (code);
2496 /* If the first operand is an expression, save its mode for later. */
2497 if (*format_ptr == 'e')
2498 op0_mode = GET_MODE (XEXP (x, 0));
2500 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2502 switch (*format_ptr++)
2504 case '0':
2505 break;
2507 case 'e':
2508 if (XEXP (x, i))
2509 subst_constants (&XEXP (x, i), insn, map, memonly);
2510 break;
2512 case 'u':
2513 case 'i':
2514 case 's':
2515 case 'w':
2516 case 'n':
2517 case 't':
2518 break;
2520 case 'E':
2521 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2522 for (j = 0; j < XVECLEN (x, i); j++)
2523 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2525 break;
2527 default:
2528 abort ();
2532 /* If this is a commutative operation, move a constant to the second
2533 operand unless the second operand is already a CONST_INT. */
2534 if (! memonly
2535 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2536 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2538 rtx tem = XEXP (x, 0);
2539 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2540 validate_change (insn, &XEXP (x, 1), tem, 1);
2543 /* Simplify the expression in case we put in some constants. */
2544 if (! memonly)
2545 switch (GET_RTX_CLASS (code))
2547 case '1':
2548 if (op0_mode == MAX_MACHINE_MODE)
2549 abort ();
2550 new = simplify_unary_operation (code, GET_MODE (x),
2551 XEXP (x, 0), op0_mode);
2552 break;
2554 case '<':
2556 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2558 if (op_mode == VOIDmode)
2559 op_mode = GET_MODE (XEXP (x, 1));
2560 new = simplify_relational_operation (code, op_mode,
2561 XEXP (x, 0), XEXP (x, 1));
2562 #ifdef FLOAT_STORE_FLAG_VALUE
2563 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2565 enum machine_mode mode = GET_MODE (x);
2566 if (new == const0_rtx)
2567 new = CONST0_RTX (mode);
2568 else
2570 REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
2571 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2574 #endif
2575 break;
2578 case '2':
2579 case 'c':
2580 new = simplify_binary_operation (code, GET_MODE (x),
2581 XEXP (x, 0), XEXP (x, 1));
2582 break;
2584 case 'b':
2585 case '3':
2586 if (op0_mode == MAX_MACHINE_MODE)
2587 abort ();
2589 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2590 XEXP (x, 0), XEXP (x, 1),
2591 XEXP (x, 2));
2592 break;
2595 if (new)
2596 validate_change (insn, loc, new, 1);
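/* As a sketch of the effect: a PLUS whose first operand became constant
   is canonicalized above so the constant is the second operand, and if
   both operands end up constant, simplify_binary_operation folds the
   whole PLUS into a single CONST_INT that validate_change records.  */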
2599 /* Show that registers modified no longer contain known constants. We are
2600 called from note_stores with parts of the new insn. */
2602 static void
2603 mark_stores (dest, x, data)
2604 rtx dest;
2605 rtx x ATTRIBUTE_UNUSED;
2606 void *data ATTRIBUTE_UNUSED;
2608 int regno = -1;
2609 enum machine_mode mode = VOIDmode;
2611 /* DEST is always the innermost thing set, except in the case of
2612 SUBREGs of hard registers. */
2614 if (GET_CODE (dest) == REG)
2615 regno = REGNO (dest), mode = GET_MODE (dest);
2616 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2618 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2619 mode = GET_MODE (SUBREG_REG (dest));
2622 if (regno >= 0)
2624 unsigned int uregno = regno;
2625 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2626 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2627 unsigned int i;
2629 /* Ignore virtual stack var or virtual arg register since those
2630 are handled separately. */
2631 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2632 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2633 for (i = uregno; i <= last_reg; i++)
2634 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2635 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
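/* Note that a multi-word store invalidates every hard register it
   covers: for example, a DImode store to hard register N on a 32-bit
   word target clears the recorded equivalences for both N and N+1,
   via HARD_REGNO_NREGS above.  */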
2639 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2640 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2641 that it points to the node itself, thus indicating that the node is its
2642 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2643 the given node is NULL, recursively descend the decl/block tree which
2644 it is the root of, and for each other ..._DECL or BLOCK node contained
2645 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2646 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2647 values to point to themselves. */
2649 static void
2650 set_block_origin_self (stmt)
2651 register tree stmt;
2653 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2655 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2658 register tree local_decl;
2660 for (local_decl = BLOCK_VARS (stmt);
2661 local_decl != NULL_TREE;
2662 local_decl = TREE_CHAIN (local_decl))
2663 set_decl_origin_self (local_decl); /* Potential recursion. */
2667 register tree subblock;
2669 for (subblock = BLOCK_SUBBLOCKS (stmt);
2670 subblock != NULL_TREE;
2671 subblock = BLOCK_CHAIN (subblock))
2672 set_block_origin_self (subblock); /* Recurse. */
2677 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2678 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2679 node so that it points to the node itself, thus indicating that the
2680 node represents its own (abstract) origin. Additionally, if the
2681 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2682 the decl/block tree of which the given node is the root, and for
2683 each other ..._DECL or BLOCK node contained therein whose
2684 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2685 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2686 point to themselves. */
2688 void
2689 set_decl_origin_self (decl)
2690 register tree decl;
2692 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2694 DECL_ABSTRACT_ORIGIN (decl) = decl;
2695 if (TREE_CODE (decl) == FUNCTION_DECL)
2697 register tree arg;
2699 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2700 DECL_ABSTRACT_ORIGIN (arg) = arg;
2701 if (DECL_INITIAL (decl) != NULL_TREE
2702 && DECL_INITIAL (decl) != error_mark_node)
2703 set_block_origin_self (DECL_INITIAL (decl));
2708 /* Given a pointer to some BLOCK node, and a boolean value to set the
2709 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2710 the given block, and for all local decls and all local sub-blocks
2711 (recursively) which are contained therein. */
2713 static void
2714 set_block_abstract_flags (stmt, setting)
2715 register tree stmt;
2716 register int setting;
2718 register tree local_decl;
2719 register tree subblock;
2721 BLOCK_ABSTRACT (stmt) = setting;
2723 for (local_decl = BLOCK_VARS (stmt);
2724 local_decl != NULL_TREE;
2725 local_decl = TREE_CHAIN (local_decl))
2726 set_decl_abstract_flags (local_decl, setting);
2728 for (subblock = BLOCK_SUBBLOCKS (stmt);
2729 subblock != NULL_TREE;
2730 subblock = BLOCK_CHAIN (subblock))
2731 set_block_abstract_flags (subblock, setting);
2734 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2735 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2736 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2737 set the abstract flags for all of the parameters, local vars, local
2738 blocks and sub-blocks (recursively) to the same setting. */
2740 void
2741 set_decl_abstract_flags (decl, setting)
2742 register tree decl;
2743 register int setting;
2745 DECL_ABSTRACT (decl) = setting;
2746 if (TREE_CODE (decl) == FUNCTION_DECL)
2748 register tree arg;
2750 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2751 DECL_ABSTRACT (arg) = setting;
2752 if (DECL_INITIAL (decl) != NULL_TREE
2753 && DECL_INITIAL (decl) != error_mark_node)
2754 set_block_abstract_flags (DECL_INITIAL (decl), setting);
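/* A rough sketch of the usual bracketing by callers that emit debugging
   information for an abstract inline instance (caller context assumed,
   not part of this file):  */
#if 0
  set_decl_abstract_flags (fndecl, 1);
  /* ... emit debug info for the abstract instance of FNDECL ... */
  set_decl_abstract_flags (fndecl, 0);
#endif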
2758 /* Output the assembly language code for the function FNDECL
2759 from its DECL_SAVED_INSNS. Used for inline functions that are output
2760 at the end of compilation instead of where they appeared in the source. */
2762 void
2763 output_inline_function (fndecl)
2764 tree fndecl;
2766 struct function *old_cfun = cfun;
2767 enum debug_info_type old_write_symbols = write_symbols;
2768 struct function *f = DECL_SAVED_INSNS (fndecl);
2770 cfun = f;
2771 current_function_decl = fndecl;
2772 clear_emit_caches ();
2774 set_new_last_label_num (f->inl_max_label_num);
2776 /* We're not deferring this any longer. */
2777 DECL_DEFER_OUTPUT (fndecl) = 0;
2779 /* If requested, suppress debugging information. */
2780 if (f->no_debugging_symbols)
2781 write_symbols = NO_DEBUG;
2783 /* Compile this function all the way down to assembly code. */
2784 rest_of_compilation (fndecl);
2786 /* We can't inline this anymore. */
2787 f->inlinable = 0;
2788 DECL_INLINE (fndecl) = 0;
2790 cfun = old_cfun;
2791 current_function_decl = old_cfun ? old_cfun->decl : 0;
2792 write_symbols = old_write_symbols;