/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"
#include "params.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
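/* Worked example (illustrative, not in the original sources): with the
   default macro above, a two-argument function may have at most
   1 + (3 * 2) / 2 = 4 insns when optimizing for size, but
   8 * (8 + 2) = 80 insns otherwise.  */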
/* Decide whether a function with a target specific attribute
   attached can be inlined.  By default we disallow this.  */
#ifndef FUNCTION_ATTRIBUTE_INLINABLE_P
#define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) 0
#endif
static rtvec initialize_for_inline	PARAMS ((tree));
static void note_modified_parmregs	PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls	PARAMS ((tree, struct inline_remap *,
						 rtvec));
static tree integrate_decl_tree	PARAMS ((tree,
					 struct inline_remap *));
static void subst_constants	PARAMS ((rtx *, rtx,
					 struct inline_remap *, int));
static void set_block_origin_self	PARAMS ((tree));
static void set_block_abstract_flags	PARAMS ((tree, int));
static void process_reg_param	PARAMS ((struct inline_remap *, rtx,
					 rtx));
void set_decl_abstract_flags	PARAMS ((tree, int));
static void mark_stores	PARAMS ((rtx, rtx, void *));
static void save_parm_insns	PARAMS ((rtx, rtx));
static void copy_insn_list	PARAMS ((rtx, struct inline_remap *,
					 rtx));
static void copy_insn_notes	PARAMS ((rtx, struct inline_remap *,
					 int));
static int compare_blocks	PARAMS ((const PTR, const PTR));
static int find_block	PARAMS ((const PTR, const PTR));
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (i.e. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the struct function for the inlined function.  */
static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
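/* Typical use, as in copy_insn_list below:

     lab = get_label_from_map (map, CODE_LABEL_NUMBER (insn));

   so labels are materialized only for the entries actually touched
   during copying.  */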
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     MAX_INLINE_INSNS (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (MAX_INLINE_INSNS
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");
  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;
  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif
  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
	       && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }
  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }
  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
	return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (DECL_MACHINE_ATTRIBUTES (fndecl)
      && ! FUNCTION_ATTRIBUTE_INLINABLE_P (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
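/* Illustrative example (not from the original sources): a definition such
   as

     inline int f (int n) { char *p = alloca (n); ... }

   is rejected by the alloca check above, since the inlined copy's
   dynamically allocated block would otherwise not be released until the
   *caller* returned, changing the storage's lifetime.  */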
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      if (DECL_LANG_SPECIFIC (copy))
	copy_lang_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
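/* The upshot: a PARM_DECL `n' of the inlined function reappears in the
   caller as an ordinary VAR_DECL `n' whose DECL_ABSTRACT_ORIGIN points
   back at the original parameter, which is what the debugging back ends
   use to tie the copy to its abstract origin.  */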
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */
void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;
  save_parm_insns (insn, first_nonparm_insn);

  cfun->inl_max_label_num = max_label_num ();
  cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
  cfun->original_arg_vector = argvec;
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  free (parmdecl_map);
}
/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (INSN_P (insn))
	{
	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);

  return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
	  - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);

  return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
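/* Both comparators key on raw pointer values.  Any consistent total order
   will do here: the qsort in expand_inline_function and the bsearch in
   copy_insn_list both use BLOCK_ABSTRACT_ORIGIN as the key, so lookups see
   the same ordering that produced the sorted array.  */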
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;
  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (arg == error_mark_node
	  || mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
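  /* For instance (hypothetical call, not from the original sources), given
     an old-style caller that passes more arguments than the inline
     definition declares, as in

	 extern int g ();
	 ... f (a, b, g ()) ...

     the call to g must still be emitted for its side effects even though
     its value is dropped.  */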
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

	  store_expr (arg, stack_slot, 0);
	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    {
	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
	      enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

	      pmode = promote_mode (TREE_TYPE (formal), pmode,
				    &unsignedp, 0);

	      if (GET_MODE (loc) != pmode)
		abort ();

	      /* The mode of LOC and ARG can differ if LOC was a variable
		 that had its mode promoted via PROMOTED_MODE.  */
	      arg_vals[i] = convert_modes (pmode,
					   TYPE_MODE (TREE_TYPE (arg)),
					   expand_expr (arg, NULL_RTX, mode,
							EXPAND_SUM),
					   unsignedp);
	    }
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;
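  /* Sizing example (illustrative): for a two-argument function the varray
     starts with max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER)
     + 15 * 2 + 10 entries, per the estimate described above.  */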
  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }
  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
	 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      /* A virtual register might be invalid in an insn, because
		 it can cause trouble in reload.  Since we don't have access
		 to the expanders at map translation time, make sure we have
		 a proper register now.
		 If a virtual register is actually valid, cse or combine
		 can put it into the mapped insns.  */
	      if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
		  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
		temp = copy_to_mode_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else
    abort ();
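  /* Concrete case of the mode juggling above (illustrative): a function
     declared to return `short' has departing_mode HImode, but if
     expand_function_start promoted the result rtx to SImode, arriving_mode
     is SImode; REG_TO_MAP is then an SImode pseudo and TARGET its HImode
     lowpart, so the substituted insns see the promoted mode while the
     caller sees the declared one.  */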
  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
	  (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);
  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode because
       the superblocks have not been created yet.  */
    insert_block (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);
  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  VARRAY_FREE (map->block_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}
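/* The inline-expansion caller in calls.c compares the returned rtx against
   (rtx) (HOST_WIDE_INT) -1 to decide whether integration succeeded or an
   ordinary call sequence must be emitted instead (see the value convention
   documented above expand_inline_function).  */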
/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is non-zero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  register int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* Similarly if an ignored return value is clobbered.  */
	  else if (map->inline_target == 0
		   && GET_CODE (pattern) == CLOBBER
		   && GET_CODE (XEXP (pattern, 0)) == REG
		   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    break;

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;
	case JUMP_INSN:
	  if (map->integrating && returnjump_p (insn))
	    {
	      if (map->local_return_label == 0)
		map->local_return_label = gen_label_rtx ();
	      pattern = gen_jump (map->local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn whose branch
	     direction is now known, we must do something special.  */
	  if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;
	case CALL_INSN:
	  /* If this is a CALL_PLACEHOLDER insn then we need to copy the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      rtx sequence[3];
	      rtx tail_label;

	      for (i = 0; i < 3; i++)
		{
		  rtx seq;

		  sequence[i] = NULL_RTX;
		  seq = XEXP (PATTERN (insn), i);
		  if (seq)
		    {
		      start_sequence ();
		      copy_insn_list (seq, map, static_chain_value);
		      sequence[i] = get_insns ();
		      end_sequence ();
		    }
		}

	      /* Find the new tail recursion label.
		 It will already be substituted into sequence[2].  */
	      tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
						    map, 0);

	      copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
							       sequence[0],
							       sequence[1],
							       sequence[2],
							       tail_label));
	      break;
	    }

	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
	  CONST_CALL_P (copy) = CONST_CALL_P (insn);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */

	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;
	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
	     discarded because it is important to have only one of
	     each in the current function.

	     NOTE_INSN_DELETED notes aren't useful.

	     NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
	     pointer (which will soon be dangling) confuses flow's
	     attempts to preserve bb structures during the compilation
	     of a function.  */

	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
		  && NOTE_BLOCK (insn))
		{
		  tree *mapped_block_p;

		  mapped_block_p
		    = (tree *) bsearch (NOTE_BLOCK (insn),
					&VARRAY_TREE (map->block_map, 0),
					map->block_map->elements_used,
					sizeof (tree),
					find_block);

		  if (!mapped_block_p)
		    abort ();
		  else
		    NOTE_BLOCK (copy) = *mapped_block_p;
		}
	      else if (copy
		       && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
		NOTE_EXPECTED_VALUE (copy)
		  = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
					     map, 0);
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
}
/* Copy the REG_NOTES.  Increment const_age, so that only constants
   from parameters can be substituted in.  These are the only ones
   that are valid across the entire function.  */

static void
copy_insn_notes (insns, map, eh_region_offset)
     rtx insns;
     struct inline_remap *map;
     int eh_region_offset;
{
  rtx insn, new_insn;

  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      new_insn = map->insn_map[INSN_UID (insn)];
      if (! new_insn)
	continue;

      if (REG_NOTES (insn))
	{
	  rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

	  /* We must also do subst_constants, in case one of our parameters
	     has const type and constant value.  */
	  subst_constants (&note, NULL_RTX, map, 0);
	  apply_change_group ();
	  REG_NOTES (new_insn) = note;

	  /* Delete any REG_LABEL notes from the chain.  Remap any
	     REG_EH_REGION notes.  */
	  for (; note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL)
		remove_note (new_insn, note);
	      else if (REG_NOTE_KIND (note) == REG_EH_REGION)
		XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
					  + eh_region_offset);
	    }
	}

      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  int i;
	  for (i = 0; i < 3; i++)
	    copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
	}

      if (GET_CODE (insn) == JUMP_INSN
	  && GET_CODE (PATTERN (insn)) == RESX)
	XINT (PATTERN (new_insn), 0) += eh_region_offset;
    }
}
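/* Example of the REG_EH_REGION remapping above (illustrative): a note
   (REG_EH_REGION 2) copied from the inlined body becomes
   (REG_EH_REGION (2 + eh_region_offset)), matching the region numbers
   created by duplicate_eh_regions in the calling function.  */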
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree decl = copy_decl_for_inlining (tail, map->fndecl,
					  current_function_decl);
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      SET_DECL_RTL (decl, new_decl_rtl);
    }
}
1651 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1652 current function a tree of contexts isomorphic to the one that is given.
1654 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1655 registers used in the DECL_RTL field should be remapped. If it is zero,
1656 no mapping is necessary. */
1658 static tree
1659 integrate_decl_tree (let, map)
1660 tree let;
1661 struct inline_remap *map;
1663 tree t;
1664 tree new_block;
1665 tree *next;
1667 new_block = make_node (BLOCK);
1668 VARRAY_PUSH_TREE (map->block_map, new_block);
1669 next = &BLOCK_VARS (new_block);
1671 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1673 tree d;
1675 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1677 if (DECL_RTL_SET_P (t))
1679 rtx r;
1681 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1683 /* Fully instantiate the address with the equivalent form so that the
1684 debugging information contains the actual register, instead of the
1685 virtual register. Do this by not passing an insn to
1686 subst_constants. */
1687 r = DECL_RTL (d);
1688 subst_constants (&r, NULL_RTX, map, 1);
1689 SET_DECL_RTL (d, r);
1690 apply_change_group ();
1693 /* Add this declaration to the list of variables in the new
1694 block. */
1695 *next = d;
1696 next = &TREE_CHAIN (d);
1699 next = &BLOCK_SUBBLOCKS (new_block);
1700 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1702 *next = integrate_decl_tree (t, map);
1703 BLOCK_SUPERCONTEXT (*next) = new_block;
1704 next = &BLOCK_CHAIN (*next);
1707 TREE_USED (new_block) = TREE_USED (let);
1708 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1710 return new_block;
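/* For example (hypothetical callee): inlining a function whose outermost
   BLOCK declares `int i' and has one sub-BLOCK declaring `int j' yields,
   in the caller, a fresh BLOCK with a copied VAR_DECL for `i', one
   sub-BLOCK with a copy of `j', and each new BLOCK's
   BLOCK_ABSTRACT_ORIGIN pointing back at the callee BLOCK it mirrors.  */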
1713 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1714 except for those few rtx codes that are sharable.
1716 We always return an rtx that is similar to that incoming rtx, with the
1717 exception of possibly changing a REG to a SUBREG or vice versa. No
1718 rtl is ever emitted.
1720 If FOR_LHS is nonzero, it means we are processing something that will
1721 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1722 inlining since we need to be conservative in how it is set for
1723 such cases.
1725 Handle constants that need to be placed in the constant pool by
1726 calling `force_const_mem'. */
1728 rtx
1729 copy_rtx_and_substitute (orig, map, for_lhs)
1730 register rtx orig;
1731 struct inline_remap *map;
1732 int for_lhs;
1734 register rtx copy, temp;
1735 register int i, j;
1736 register RTX_CODE code;
1737 register enum machine_mode mode;
1738 register const char *format_ptr;
1739 int regno;
1741 if (orig == 0)
1742 return 0;
1744 code = GET_CODE (orig);
1745 mode = GET_MODE (orig);
1747 switch (code)
1749 case REG:
1750 /* If the stack pointer register shows up, it must be part of
1751 stack-adjustments (*not* because we eliminated the frame pointer!).
1752 Small hard registers are returned as-is. Pseudo-registers
1753 go through their `reg_map'. */
1754 regno = REGNO (orig);
1755 if (regno <= LAST_VIRTUAL_REGISTER
1756 || (map->integrating
1757 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1759 /* Some hard registers are also mapped,
1760 but others are not translated. */
1761 if (map->reg_map[regno] != 0)
1762 return map->reg_map[regno];
1764 /* If this is the virtual frame pointer, make space in current
1765 function's stack frame for the stack frame of the inline function.
1767 Copy the address of this area into a pseudo. Map
1768 virtual_stack_vars_rtx to this pseudo and set up a constant
1769 equivalence for it to be the address. This will substitute the
1770 address into insns where it can be substituted and use the new
1771 pseudo where it can't. */
1772 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1774 rtx loc, seq;
1775 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1776 #ifdef FRAME_GROWS_DOWNWARD
1777 int alignment
1778 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1779 / BITS_PER_UNIT);
1781 /* In this case, virtual_stack_vars_rtx points to one byte
1782 higher than the top of the frame area. So make sure we
1783 allocate a big enough chunk to keep the frame pointer
1784 aligned like a real one. */
1785 if (alignment)
1786 size = CEIL_ROUND (size, alignment);
1787 #endif
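/* Worked example of the rounding above (hypothetical numbers): a 20-byte
   frame whose stack_alignment_needed is 64 bits gives alignment == 8,
   and CEIL_ROUND (20, 8) == ((20 + 7) & ~7) == 24, so the temporary is
   padded to 24 bytes to keep the substitute frame aligned like a real
   frame pointer.  */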
1788 start_sequence ();
1789 loc = assign_stack_temp (BLKmode, size, 1);
1790 loc = XEXP (loc, 0);
1791 #ifdef FRAME_GROWS_DOWNWARD
1792 /* In this case, virtual_stack_vars_rtx points to one byte
1793 higher than the top of the frame area. So compute the offset
1794 to one byte higher than our substitute frame. */
1795 loc = plus_constant (loc, size);
1796 #endif
1797 map->reg_map[regno] = temp
1798 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1800 #ifdef STACK_BOUNDARY
1801 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1802 #endif
1804 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1806 seq = gen_sequence ();
1807 end_sequence ();
1808 emit_insn_after (seq, map->insns_at_start);
1809 return temp;
1811 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1812 || (map->integrating
1813 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1814 == orig)))
1816 /* Do the same for a block to contain any arguments referenced
1817 in memory. */
1818 rtx loc, seq;
1819 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1821 start_sequence ();
1822 loc = assign_stack_temp (BLKmode, size, 1);
1823 loc = XEXP (loc, 0);
1824 /* When arguments grow downward, the virtual incoming
1825 args pointer points to the top of the argument block,
1826 so the remapped location better do the same. */
1827 #ifdef ARGS_GROW_DOWNWARD
1828 loc = plus_constant (loc, size);
1829 #endif
1830 map->reg_map[regno] = temp
1831 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1833 #ifdef STACK_BOUNDARY
1834 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1835 #endif
1837 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1839 seq = gen_sequence ();
1840 end_sequence ();
1841 emit_insn_after (seq, map->insns_at_start);
1842 return temp;
1844 else if (REG_FUNCTION_VALUE_P (orig))
1846 /* This is a reference to the function return value. If
1847 the function doesn't have a return value, error. If the
1848 mode doesn't agree and it isn't BLKmode, make a SUBREG. */
1849 if (map->inline_target == 0)
1851 if (rtx_equal_function_value_matters)
1852 /* This is an ignored return value. We must not
1853 leave it in with REG_FUNCTION_VALUE_P set, since
1854 that would confuse subsequent inlining of the
1855 current function into a later function. */
1856 return gen_rtx_REG (GET_MODE (orig), regno);
1857 else
1858 /* Must be unrolling loops or replicating code if we
1859 reach here, so return the register unchanged. */
1860 return orig;
1862 else if (GET_MODE (map->inline_target) != BLKmode
1863 && mode != GET_MODE (map->inline_target))
1864 return gen_lowpart (mode, map->inline_target);
1865 else
1866 return map->inline_target;
1868 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
1869 /* If leaf_renumber_regs_insn() might remap this register to
1870 some other number, make sure we don't share it with the
1871 inlined function, otherwise delayed optimization of the
1872 inlined function may change it in place, breaking our
1873 reference to it. We may still share it within the
1874 function, so create an entry for this register in the
1875 reg_map. */
1876 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
1877 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
1879 if (!map->leaf_reg_map[regno][mode])
1880 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
1881 return map->leaf_reg_map[regno][mode];
1883 #endif
1884 else
1885 return orig;
1887 abort ();
1889 if (map->reg_map[regno] == NULL)
1891 map->reg_map[regno] = gen_reg_rtx (mode);
1892 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1893 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1894 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1895 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1897 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1898 mark_reg_pointer (map->reg_map[regno],
1899 map->regno_pointer_align[regno]);
1901 return map->reg_map[regno];
1903 case SUBREG:
1904 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1905 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1906 if (GET_CODE (copy) == SUBREG)
1908 int final_offset = SUBREG_BYTE (orig) + SUBREG_BYTE (copy);
1910 /* When working with SUBREGs the rule is that the byte
1911 offset must be a multiple of the size of the SUBREG's mode. */
1912 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (orig)));
1913 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (orig)));
1914 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
1915 final_offset);
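/* Illustrative arithmetic for the truncation above: with SUBREG_BYTE
   values of 2 (orig) and 4 (copy) and GET_MODE (orig) == SImode on a
   target where SImode is 4 bytes, final_offset starts at 6 and is
   rounded down to 4, the nearest multiple of the mode's size.  */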
1917 else if (GET_CODE (copy) == CONCAT)
1919 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
1920 int final_offset;
1922 if (GET_MODE (retval) == GET_MODE (orig))
1923 return retval;
1925 final_offset = SUBREG_BYTE (orig) %
1926 GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)));
1927 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (orig)));
1928 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (orig)));
1929 return gen_rtx_SUBREG (GET_MODE (orig), retval, final_offset);
1931 else
1932 return gen_rtx_SUBREG (GET_MODE (orig), copy,
1933 SUBREG_BYTE (orig));
1935 case ADDRESSOF:
1936 copy = gen_rtx_ADDRESSOF (mode,
1937 copy_rtx_and_substitute (XEXP (orig, 0),
1938 map, for_lhs),
1939 0, ADDRESSOF_DECL (orig));
1940 regno = ADDRESSOF_REGNO (orig);
1941 if (map->reg_map[regno])
1942 regno = REGNO (map->reg_map[regno]);
1943 else if (regno > LAST_VIRTUAL_REGISTER)
1945 temp = XEXP (orig, 0);
1946 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
1947 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
1948 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
1949 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
1950 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1952 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1953 mark_reg_pointer (map->reg_map[regno],
1954 map->regno_pointer_align[regno]);
1955 regno = REGNO (map->reg_map[regno]);
1957 ADDRESSOF_REGNO (copy) = regno;
1958 return copy;
1960 case USE:
1961 case CLOBBER:
1962 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1963 to (use foo) if the original insn didn't have a subreg.
1964 Removing the subreg distorts the VAX movstrhi pattern
1965 by changing the mode of an operand. */
1966 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
1967 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1968 copy = SUBREG_REG (copy);
1969 return gen_rtx_fmt_e (code, VOIDmode, copy);
1971 case CODE_LABEL:
1972 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1973 = LABEL_PRESERVE_P (orig);
1974 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
1976 /* We need to handle "deleted" labels that appear in the DECL_RTL
1977 of a LABEL_DECL. */
1978 case NOTE:
1979 if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
1980 return map->insn_map[INSN_UID (orig)];
1981 break;
1983 case LABEL_REF:
1984 copy
1985 = gen_rtx_LABEL_REF
1986 (mode,
1987 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1988 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
1990 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1992 /* The fact that this label was previously nonlocal does not mean
1993 it still is, so we must check if it is within the range of
1994 this function's labels. */
1995 LABEL_REF_NONLOCAL_P (copy)
1996 = (LABEL_REF_NONLOCAL_P (orig)
1997 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
1998 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2000 /* If we have made a nonlocal label local, it means that this
2001 inlined call will be referring to our nonlocal goto handler.
2002 So make sure we create one for this block; we normally would
2003 not since this is not otherwise considered a "call". */
2004 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2005 function_call_count++;
2007 return copy;
2009 case PC:
2010 case CC0:
2011 case CONST_INT:
2012 return orig;
2014 case SYMBOL_REF:
2015 /* Symbols which represent the address of a label stored in the constant
2016 pool must be modified to point to a constant pool entry for the
2017 remapped label. Otherwise, symbols are returned unchanged. */
2018 if (CONSTANT_POOL_ADDRESS_P (orig))
2020 struct function *f = inlining ? inlining : cfun;
2021 rtx constant = get_pool_constant_for_function (f, orig);
2022 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2023 if (inlining)
2025 rtx temp = force_const_mem (const_mode,
2026 copy_rtx_and_substitute (constant,
2027 map, 0));
2029 #if 0
2030 /* Legitimizing the address here is incorrect.
2032 Since we had a SYMBOL_REF before, we can assume it is valid
2033 to have one in this position in the insn.
2035 Also, change_address may create new registers. These
2036 registers will not have valid reg_map entries. This can
2037 cause try_constants() to fail because it assumes that all
2038 registers in the rtx have valid reg_map entries, and it may
2039 end up replacing one of these new registers with junk. */
2041 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2042 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2043 #endif
2045 temp = XEXP (temp, 0);
2047 #ifdef POINTERS_EXTEND_UNSIGNED
2048 if (GET_MODE (temp) != GET_MODE (orig))
2049 temp = convert_memory_address (GET_MODE (orig), temp);
2050 #endif
2051 return temp;
2053 else if (GET_CODE (constant) == LABEL_REF)
2054 return XEXP (force_const_mem
2055 (GET_MODE (orig),
2056 copy_rtx_and_substitute (constant, map, for_lhs)),
2057 0);
2060 return orig;
2062 case CONST_DOUBLE:
2063 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2064 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2065 duplicate of a CONST_DOUBLE we have already seen. */
2066 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2068 REAL_VALUE_TYPE d;
2070 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2071 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2073 else
2074 return immed_double_const (CONST_DOUBLE_LOW (orig),
2075 CONST_DOUBLE_HIGH (orig), VOIDmode);
2077 case CONST:
2078 /* Make new constant pool entry for a constant
2079 that was in the pool of the inline function. */
2080 if (RTX_INTEGRATED_P (orig))
2081 abort ();
2082 break;
2084 case ASM_OPERANDS:
2085 /* If a single asm insn contains multiple output operands then
2086 it contains multiple ASM_OPERANDS rtx's that share the input
2087 and constraint vecs. We must make sure that the copied insn
2088 continues to share it. */
2089 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2091 copy = rtx_alloc (ASM_OPERANDS);
2092 copy->volatil = orig->volatil;
2093 PUT_MODE (copy, GET_MODE (orig));
2094 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2095 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2096 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2097 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2098 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2099 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2100 = map->copy_asm_constraints_vector;
2101 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2102 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2103 return copy;
2105 break;
2107 case CALL:
2108 /* This is given special treatment because the first
2109 operand of a CALL is a (MEM ...) which may get
2110 forced into a register for cse. This is undesirable
2111 if function-address cse isn't wanted or if we won't do cse. */
2112 #ifndef NO_FUNCTION_CSE
2113 if (! (optimize && ! flag_no_function_cse))
2114 #endif
2115 return
2116 gen_rtx_CALL
2117 (GET_MODE (orig),
2118 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2119 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2120 map, 0)),
2121 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2122 break;
2124 #if 0
2125 /* Must be ifdefed out for loop unrolling to work. */
2126 case RETURN:
2127 abort ();
2128 #endif
2130 case SET:
2131 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2132 Adjust the setting by the offset of the area we made.
2133 If the nonlocal goto is into the current function,
2134 this will result in unnecessarily bad code, but should work. */
2135 if (SET_DEST (orig) == virtual_stack_vars_rtx
2136 || SET_DEST (orig) == virtual_incoming_args_rtx)
2138 /* In case a translation hasn't occurred already, make one now. */
2139 rtx equiv_reg;
2140 rtx equiv_loc;
2141 HOST_WIDE_INT loc_offset;
2143 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2144 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2145 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2146 REGNO (equiv_reg)).rtx;
2147 loc_offset
2148 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2150 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2151 force_operand
2152 (plus_constant
2153 (copy_rtx_and_substitute (SET_SRC (orig),
2154 map, 0),
2155 - loc_offset),
2156 NULL_RTX));
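/* For example (hypothetical numbers): if the remapped register's
   constant equivalent is (plus (reg 117) (const_int 16)), loc_offset is
   16, and the nonlocal goto's store into the frame pointer is rewritten
   above to store SRC minus 16, compensating for the area's displacement
   within the caller's frame.  */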
2158 else
2159 return gen_rtx_SET (VOIDmode,
2160 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2161 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2162 break;
2164 case MEM:
2165 if (inlining
2166 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2167 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2169 enum machine_mode const_mode
2170 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2171 rtx constant
2172 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2174 constant = copy_rtx_and_substitute (constant, map, 0);
2176 /* If this was an address of a constant pool entry that itself
2177 had to be placed in the constant pool, it might not be a
2178 valid address. So the recursive call might have turned it
2179 into a register. In that case, it isn't a constant any
2180 more, so return it. This has the potential of changing a
2181 MEM into a REG, but we'll assume that it is safe. */
2182 if (! CONSTANT_P (constant))
2183 return constant;
2185 return validize_mem (force_const_mem (const_mode, constant));
2188 copy = rtx_alloc (MEM);
2189 PUT_MODE (copy, mode);
2190 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
2191 MEM_COPY_ATTRIBUTES (copy, orig);
2192 return copy;
2194 default:
2195 break;
2198 copy = rtx_alloc (code);
2199 PUT_MODE (copy, mode);
2200 copy->in_struct = orig->in_struct;
2201 copy->volatil = orig->volatil;
2202 copy->unchanging = orig->unchanging;
2204 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2206 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2208 switch (*format_ptr++)
2210 case '0':
2211 /* Copy this through the wide int field; that's safest. */
2212 X0WINT (copy, i) = X0WINT (orig, i);
2213 break;
2215 case 'e':
2216 XEXP (copy, i)
2217 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2218 break;
2220 case 'u':
2221 /* Change any references to old-insns to point to the
2222 corresponding copied insns. */
2223 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2224 break;
2226 case 'E':
2227 XVEC (copy, i) = XVEC (orig, i);
2228 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2230 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2231 for (j = 0; j < XVECLEN (copy, i); j++)
2232 XVECEXP (copy, i, j)
2233 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2234 map, for_lhs);
2236 break;
2238 case 'w':
2239 XWINT (copy, i) = XWINT (orig, i);
2240 break;
2242 case 'i':
2243 XINT (copy, i) = XINT (orig, i);
2244 break;
2246 case 's':
2247 XSTR (copy, i) = XSTR (orig, i);
2248 break;
2250 case 't':
2251 XTREE (copy, i) = XTREE (orig, i);
2252 break;
2254 default:
2255 abort ();
2259 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2261 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2262 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2263 map->copy_asm_constraints_vector
2264 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2267 return copy;
2270 /* Substitute known constant values into INSN, if that is valid. */
2272 void
2273 try_constants (insn, map)
2274 rtx insn;
2275 struct inline_remap *map;
2277 int i;
2279 map->num_sets = 0;
2281 /* First try just updating addresses, then other things. This is
2282 important when we have something like the store of a constant
2283 into memory and we can update the memory address but the machine
2284 does not support a constant source. */
2285 subst_constants (&PATTERN (insn), insn, map, 1);
2286 apply_change_group ();
2287 subst_constants (&PATTERN (insn), insn, map, 0);
2288 apply_change_group ();
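/* A sketch of why the address-only pass runs first (hypothetical insn):
   for (set (mem:SI (reg 117)) (reg 118)) where reg 117 is equivalent to
   a frame address and reg 118 to (const_int 5), rewriting the MEM
   address is valid even on a machine whose store insn rejects an
   immediate source; the second pass then tries the source as well, and
   its change group is kept only if the result is still recognizable.  */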
2290 /* Show we don't know the value of anything stored or clobbered. */
2291 note_stores (PATTERN (insn), mark_stores, NULL);
2292 map->last_pc_value = 0;
2293 #ifdef HAVE_cc0
2294 map->last_cc0_value = 0;
2295 #endif
2297 /* Set up any constant equivalences made in this insn. */
2298 for (i = 0; i < map->num_sets; i++)
2300 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2302 int regno = REGNO (map->equiv_sets[i].dest);
2304 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2305 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2306 /* The following clause is a hack to make the case work where GNU C++
2307 reassigns a variable to make cse work right. */
2308 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2309 regno).rtx,
2310 map->equiv_sets[i].equiv))
2311 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2312 map->equiv_sets[i].equiv, map->const_age);
2314 else if (map->equiv_sets[i].dest == pc_rtx)
2315 map->last_pc_value = map->equiv_sets[i].equiv;
2316 #ifdef HAVE_cc0
2317 else if (map->equiv_sets[i].dest == cc0_rtx)
2318 map->last_cc0_value = map->equiv_sets[i].equiv;
2319 #endif
2323 /* Substitute known constants for pseudo regs in the contents of LOC,
2324 which are part of INSN.
2325 If INSN is zero, the substitution should always be done (this is used to
2326 update DECL_RTL).
2327 These changes are taken out by try_constants if the result is not valid.
2329 Note that we are more concerned with determining when the result of a SET
2330 is a constant, for further propagation, than actually inserting constants
2331 into insns; cse will do the latter task better.
2333 This function is also used to adjust address of items previously addressed
2334 via the virtual stack variable or virtual incoming arguments registers.
2336 If MEMONLY is nonzero, only make changes inside a MEM. */
2338 static void
2339 subst_constants (loc, insn, map, memonly)
2340 rtx *loc;
2341 rtx insn;
2342 struct inline_remap *map;
2343 int memonly;
2345 rtx x = *loc;
2346 register int i, j;
2347 register enum rtx_code code;
2348 register const char *format_ptr;
2349 int num_changes = num_validated_changes ();
2350 rtx new = 0;
2351 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2353 code = GET_CODE (x);
2355 switch (code)
2357 case PC:
2358 case CONST_INT:
2359 case CONST_DOUBLE:
2360 case SYMBOL_REF:
2361 case CONST:
2362 case LABEL_REF:
2363 case ADDRESS:
2364 return;
2366 #ifdef HAVE_cc0
2367 case CC0:
2368 if (! memonly)
2369 validate_change (insn, loc, map->last_cc0_value, 1);
2370 return;
2371 #endif
2373 case USE:
2374 case CLOBBER:
2375 /* The only thing we can do with a USE or CLOBBER is possibly do
2376 some substitutions in a MEM within it. */
2377 if (GET_CODE (XEXP (x, 0)) == MEM)
2378 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2379 return;
2381 case REG:
2382 /* Substitute for parms and known constants. Don't replace
2383 hard regs used as user variables with constants. */
2384 if (! memonly)
2386 int regno = REGNO (x);
2387 struct const_equiv_data *p;
2389 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2390 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2391 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2392 p->rtx != 0)
2393 && p->age >= map->const_age)
2394 validate_change (insn, loc, p->rtx, 1);
2396 return;
2398 case SUBREG:
2399 /* SUBREG applied to something other than a reg
2400 should be treated as ordinary, since that must
2401 be a special hack and we don't know how to treat it specially.
2402 Consider for example mulsidi3 in m68k.md.
2403 Ordinary SUBREG of a REG needs this special treatment. */
2404 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2406 rtx inner = SUBREG_REG (x);
2407 rtx new = 0;
2409 /* We can't call subst_constants on &SUBREG_REG (x) because any
2410 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2411 see what is inside, try to form the new SUBREG and see if that is
2412 valid. We handle two cases: extracting a full word in an
2413 integral mode and extracting the low part. */
2414 subst_constants (&inner, NULL_RTX, map, 0);
2416 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2417 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2418 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2419 new = operand_subword (inner, SUBREG_BYTE (x) / UNITS_PER_WORD,
2420 0, GET_MODE (SUBREG_REG (x)));
2422 cancel_changes (num_changes);
2423 if (new == 0 && subreg_lowpart_p (x))
2424 new = gen_lowpart_common (GET_MODE (x), inner);
2426 if (new)
2427 validate_change (insn, loc, new, 1);
2429 return;
2431 break;
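/* Illustrative example of the word extraction above: if INNER
   substitutes to a CONST_DOUBLE and X is (subreg:SI (reg:DI ...) 4) on
   a target with 4-byte words, operand_subword folds the SUBREG to word
   1 of the constant, and that word is queued via validate_change.  */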
2433 case MEM:
2434 subst_constants (&XEXP (x, 0), insn, map, 0);
2436 /* If a memory address got spoiled, change it back. */
2437 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2438 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2439 cancel_changes (num_changes);
2440 return;
2442 case SET:
2444 /* Substitute constants in our source, and in any arguments to a
2445 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2446 itself. */
2447 rtx *dest_loc = &SET_DEST (x);
2448 rtx dest = *dest_loc;
2449 rtx src, tem;
2450 enum machine_mode compare_mode = VOIDmode;
2452 /* If SET_SRC is a COMPARE which subst_constants would turn into
2453 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2454 is to be done. */
2455 if (GET_CODE (SET_SRC (x)) == COMPARE)
2457 src = SET_SRC (x);
2458 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2459 #ifdef HAVE_cc0
2460 || dest == cc0_rtx
2461 #endif
2464 compare_mode = GET_MODE (XEXP (src, 0));
2465 if (compare_mode == VOIDmode)
2466 compare_mode = GET_MODE (XEXP (src, 1));
2470 subst_constants (&SET_SRC (x), insn, map, memonly);
2471 src = SET_SRC (x);
2473 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2474 || GET_CODE (*dest_loc) == SUBREG
2475 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2477 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2479 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2480 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2482 dest_loc = &XEXP (*dest_loc, 0);
2485 /* Do substitute in the address of a destination in memory. */
2486 if (GET_CODE (*dest_loc) == MEM)
2487 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2489 /* Check for the case where DEST is a SUBREG, both it and the underlying
2490 register are no more than one word, and the SUBREG has the wider mode.
2491 In that case, we are really setting the underlying register to the
2492 source converted to the mode of DEST. So indicate that. */
2493 if (GET_CODE (dest) == SUBREG
2494 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2495 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2496 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2497 <= GET_MODE_SIZE (GET_MODE (dest)))
2498 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2499 src)))
2500 src = tem, dest = SUBREG_REG (dest);
2502 /* If storing a recognizable value, save it for later recording. */
2503 if ((map->num_sets < MAX_RECOG_OPERANDS)
2504 && (CONSTANT_P (src)
2505 || (GET_CODE (src) == REG
2506 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2507 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2508 || (GET_CODE (src) == PLUS
2509 && GET_CODE (XEXP (src, 0)) == REG
2510 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2511 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2512 && CONSTANT_P (XEXP (src, 1)))
2513 || GET_CODE (src) == COMPARE
2514 #ifdef HAVE_cc0
2515 || dest == cc0_rtx
2516 #endif
2517 || (dest == pc_rtx
2518 && (src == pc_rtx || GET_CODE (src) == RETURN
2519 || GET_CODE (src) == LABEL_REF))))
2521 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2522 it will cause us to save the COMPARE with any constants
2523 substituted, which is what we want for later. */
2524 rtx src_copy = copy_rtx (src);
2525 map->equiv_sets[map->num_sets].equiv = src_copy;
2526 map->equiv_sets[map->num_sets++].dest = dest;
2527 if (compare_mode != VOIDmode
2528 && GET_CODE (src) == COMPARE
2529 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2530 #ifdef HAVE_cc0
2531 || dest == cc0_rtx
2532 #endif
2534 && GET_MODE (XEXP (src, 0)) == VOIDmode
2535 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2537 map->compare_src = src_copy;
2538 map->compare_mode = compare_mode;
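/* For example (hypothetical values): once both operands of a cc0
   COMPARE substitute to VOIDmode constants, as in
   (set (cc0) (compare (const_int 2) (const_int 3))), the SImode noted
   from the original operands is what lets the IF_THEN_ELSE folding
   later in this function simplify the comparison.  */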
2542 return;
2544 default:
2545 break;
2548 format_ptr = GET_RTX_FORMAT (code);
2550 /* If the first operand is an expression, save its mode for later. */
2551 if (*format_ptr == 'e')
2552 op0_mode = GET_MODE (XEXP (x, 0));
2554 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2556 switch (*format_ptr++)
2558 case '0':
2559 break;
2561 case 'e':
2562 if (XEXP (x, i))
2563 subst_constants (&XEXP (x, i), insn, map, memonly);
2564 break;
2566 case 'u':
2567 case 'i':
2568 case 's':
2569 case 'w':
2570 case 'n':
2571 case 't':
2572 break;
2574 case 'E':
2575 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2576 for (j = 0; j < XVECLEN (x, i); j++)
2577 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2579 break;
2581 default:
2582 abort ();
2586 /* If this is a commutative operation, move a constant to the second
2587 operand unless the second operand is already a CONST_INT. */
2588 if (! memonly
2589 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2590 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2592 rtx tem = XEXP (x, 0);
2593 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2594 validate_change (insn, &XEXP (x, 1), tem, 1);
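/* For instance, a substitution that yields
   (plus:SI (const_int 4) (reg:SI 60)) is canonicalized here to
   (plus:SI (reg:SI 60) (const_int 4)), the operand order later passes
   expect for commutative operations.  */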
2597 /* Simplify the expression in case we put in some constants. */
2598 if (! memonly)
2599 switch (GET_RTX_CLASS (code))
2601 case '1':
2602 if (op0_mode == MAX_MACHINE_MODE)
2603 abort ();
2604 new = simplify_unary_operation (code, GET_MODE (x),
2605 XEXP (x, 0), op0_mode);
2606 break;
2608 case '<':
2610 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2612 if (op_mode == VOIDmode)
2613 op_mode = GET_MODE (XEXP (x, 1));
2614 new = simplify_relational_operation (code, op_mode,
2615 XEXP (x, 0), XEXP (x, 1));
2616 #ifdef FLOAT_STORE_FLAG_VALUE
2617 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2619 enum machine_mode mode = GET_MODE (x);
2620 if (new == const0_rtx)
2621 new = CONST0_RTX (mode);
2622 else
2624 REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
2625 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2628 #endif
2629 break;
2632 case '2':
2633 case 'c':
2634 new = simplify_binary_operation (code, GET_MODE (x),
2635 XEXP (x, 0), XEXP (x, 1));
2636 break;
2638 case 'b':
2639 case '3':
2640 if (op0_mode == MAX_MACHINE_MODE)
2641 abort ();
2643 if (code == IF_THEN_ELSE)
2645 rtx op0 = XEXP (x, 0);
2647 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2648 && GET_MODE (op0) == VOIDmode
2649 && ! side_effects_p (op0)
2650 && XEXP (op0, 0) == map->compare_src
2651 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2653 /* We have a compare of two VOIDmode constants for which
2654 we recorded the comparison mode. */
2655 rtx temp =
2656 simplify_relational_operation (GET_CODE (op0),
2657 map->compare_mode,
2658 XEXP (op0, 0),
2659 XEXP (op0, 1));
2661 if (temp == const0_rtx)
2662 new = XEXP (x, 2);
2663 else if (temp == const1_rtx)
2664 new = XEXP (x, 1);
2667 if (!new)
2668 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2669 XEXP (x, 0), XEXP (x, 1),
2670 XEXP (x, 2));
2671 break;
2674 if (new)
2675 validate_change (insn, loc, new, 1);
2678 /* Show that registers modified no longer contain known constants. We are
2679 called from note_stores with parts of the new insn. */
2681 static void
2682 mark_stores (dest, x, data)
2683 rtx dest;
2684 rtx x ATTRIBUTE_UNUSED;
2685 void *data ATTRIBUTE_UNUSED;
2687 int regno = -1;
2688 enum machine_mode mode = VOIDmode;
2690 /* DEST is always the innermost thing set, except in the case of
2691 SUBREGs of hard registers. */
2693 if (GET_CODE (dest) == REG)
2694 regno = REGNO (dest), mode = GET_MODE (dest);
2695 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2697 regno = REGNO (SUBREG_REG (dest));
2698 if (regno < FIRST_PSEUDO_REGISTER)
2699 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2700 GET_MODE (SUBREG_REG (dest)),
2701 SUBREG_BYTE (dest),
2702 GET_MODE (dest));
2703 mode = GET_MODE (SUBREG_REG (dest));
2706 if (regno >= 0)
2708 unsigned int uregno = regno;
2709 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2710 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2711 unsigned int i;
2713 /* Ignore virtual stack var or virtual arg register since those
2714 are handled separately. */
2715 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2716 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2717 for (i = uregno; i <= last_reg; i++)
2718 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2719 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
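/* Illustrative example: a store to (reg:DI 3) on a target with 32-bit
   words has HARD_REGNO_NREGS (3, DImode) == 2 (on typical such targets),
   so entries 3 and 4 of global_const_equiv_varray are both cleared.  */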
2723 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2724 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2725 that it points to the node itself, thus indicating that the node is its
2726 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2727 the given node is NULL, recursively descend the decl/block tree which
2728 it is the root of, and for each other ..._DECL or BLOCK node contained
2729 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2730 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2731 values to point to themselves. */
2733 static void
2734 set_block_origin_self (stmt)
2735 register tree stmt;
2737 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2739 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2742 register tree local_decl;
2744 for (local_decl = BLOCK_VARS (stmt);
2745 local_decl != NULL_TREE;
2746 local_decl = TREE_CHAIN (local_decl))
2747 set_decl_origin_self (local_decl); /* Potential recursion. */
2751 register tree subblock;
2753 for (subblock = BLOCK_SUBBLOCKS (stmt);
2754 subblock != NULL_TREE;
2755 subblock = BLOCK_CHAIN (subblock))
2756 set_block_origin_self (subblock); /* Recurse. */
2761 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2762 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2763 node so that it points to the node itself, thus indicating that the
2764 node represents its own (abstract) origin. Additionally, if the
2765 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2766 the decl/block tree of which the given node is the root of, and for
2767 each other ..._DECL or BLOCK node contained therein whose
2768 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2769 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2770 point to themselves. */
2772 void
2773 set_decl_origin_self (decl)
2774 register tree decl;
2776 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2778 DECL_ABSTRACT_ORIGIN (decl) = decl;
2779 if (TREE_CODE (decl) == FUNCTION_DECL)
2781 register tree arg;
2783 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2784 DECL_ABSTRACT_ORIGIN (arg) = arg;
2785 if (DECL_INITIAL (decl) != NULL_TREE
2786 && DECL_INITIAL (decl) != error_mark_node)
2787 set_block_origin_self (DECL_INITIAL (decl));
2792 /* Given a pointer to some BLOCK node, and a boolean value to set the
2793 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2794 the given block, and for all local decls and all local sub-blocks
2795 (recursively) which are contained therein. */
2797 static void
2798 set_block_abstract_flags (stmt, setting)
2799 register tree stmt;
2800 register int setting;
2802 register tree local_decl;
2803 register tree subblock;
2805 BLOCK_ABSTRACT (stmt) = setting;
2807 for (local_decl = BLOCK_VARS (stmt);
2808 local_decl != NULL_TREE;
2809 local_decl = TREE_CHAIN (local_decl))
2810 set_decl_abstract_flags (local_decl, setting);
2812 for (subblock = BLOCK_SUBBLOCKS (stmt);
2813 subblock != NULL_TREE;
2814 subblock = BLOCK_CHAIN (subblock))
2815 set_block_abstract_flags (subblock, setting);
2818 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2819 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2820 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2821 set the abstract flags for all of the parameters, local vars, local
2822 blocks and sub-blocks (recursively) to the same setting. */
2824 void
2825 set_decl_abstract_flags (decl, setting)
2826 register tree decl;
2827 register int setting;
2829 DECL_ABSTRACT (decl) = setting;
2830 if (TREE_CODE (decl) == FUNCTION_DECL)
2832 register tree arg;
2834 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2835 DECL_ABSTRACT (arg) = setting;
2836 if (DECL_INITIAL (decl) != NULL_TREE
2837 && DECL_INITIAL (decl) != error_mark_node)
2838 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2842 /* Output the assembly language code for the function FNDECL
2843 from its DECL_SAVED_INSNS. Used for inline functions that are output
2844 at the end of compilation instead of where they appeared in the source. */
2846 void
2847 output_inline_function (fndecl)
2848 tree fndecl;
2850 struct function *old_cfun = cfun;
2851 enum debug_info_type old_write_symbols = write_symbols;
2852 struct function *f = DECL_SAVED_INSNS (fndecl);
2854 cfun = f;
2855 current_function_decl = fndecl;
2856 clear_emit_caches ();
2858 set_new_last_label_num (f->inl_max_label_num);
2860 /* We're not deferring this any longer. */
2861 DECL_DEFER_OUTPUT (fndecl) = 0;
2863 /* If requested, suppress debugging information. */
2864 if (f->no_debugging_symbols)
2865 write_symbols = NO_DEBUG;
2867 /* Do any preparation, such as emitting abstract debug info for the inline
2868 function before it gets mangled by optimization. */
2869 note_outlining_of_inline_function (fndecl);
2871 /* Compile this function all the way down to assembly code. */
2872 rest_of_compilation (fndecl);
2874 /* We can't inline this anymore. */
2875 f->inlinable = 0;
2876 DECL_INLINE (fndecl) = 0;
2878 cfun = old_cfun;
2879 current_function_decl = old_cfun ? old_cfun->decl : 0;
2880 write_symbols = old_write_symbols;