1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "insn-config.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "recog.h"
35 #include "integrate.h"
36 #include "real.h"
37 #include "except.h"
38 #include "function.h"
39 #include "toplev.h"
40 #include "intl.h"
41 #include "loop.h"
42 #include "params.h"
44 #include "obstack.h"
45 #define obstack_chunk_alloc xmalloc
46 #define obstack_chunk_free free
48 extern struct obstack *function_maybepermanent_obstack;
50 /* Round VALUE up to the next highest integer that meets the
51    alignment ALIGN (which must be a power of 2).  */
52 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
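/* Worked example (illustrative, not part of the original source): with
   VALUE == 13 and ALIGN == 8, CEIL_ROUND gives (13 + 7) & ~7 == 20 & ~7
   == 16, the next multiple of 8.  The mask trick relies on ALIGN being
   a power of 2.  */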
54 /* Default max number of insns a function can have and still be inline.
55 This is overridden on RISC machines. */
56 #ifndef INTEGRATE_THRESHOLD
57 /* Inlining small functions might save more space than not inlining at
58 all. Assume 1 instruction for the call and 1.5 insns per argument. */
59 #define INTEGRATE_THRESHOLD(DECL) \
60 (optimize_size \
61 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
62 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
63 #endif
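/* For instance (a sketch, not from the original source): a function with
   two arguments gets a budget of 1 + (3 * 2) / 2 == 4 insns when
   optimizing for size, and 8 * (8 + 2) == 80 insns otherwise.  */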
65 /* Decide whether a function with a target specific attribute
66 attached can be inlined. By default we disallow this. */
67 #ifndef FUNCTION_ATTRIBUTE_INLINABLE_P
68 #define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) 0
69 #endif
71 static rtvec initialize_for_inline PARAMS ((tree));
72 static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
73 static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
74 rtvec));
75 static tree integrate_decl_tree PARAMS ((tree,
76 struct inline_remap *));
77 static void subst_constants PARAMS ((rtx *, rtx,
78 struct inline_remap *, int));
79 static void set_block_origin_self PARAMS ((tree));
80 static void set_block_abstract_flags PARAMS ((tree, int));
81 static void process_reg_param PARAMS ((struct inline_remap *, rtx,
82 rtx));
83 void set_decl_abstract_flags PARAMS ((tree, int));
84 static void mark_stores PARAMS ((rtx, rtx, void *));
85 static void save_parm_insns PARAMS ((rtx, rtx));
86 static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
87 rtx));
88 static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
89 int));
90 static int compare_blocks PARAMS ((const PTR, const PTR));
91 static int find_block PARAMS ((const PTR, const PTR));
93 /* Used by copy_rtx_and_substitute; this indicates whether the function is
94    called for the purpose of inlining or some other purpose (e.g. loop
95 unrolling). This affects how constant pool references are handled.
96    This variable contains the struct function for the inlined function.  */
97 static struct function *inlining = 0;
99 /* Returns the Ith entry in the label_map contained in MAP. If the
100 Ith entry has not yet been set, return a fresh label. This function
101 performs a lazy initialization of label_map, thereby avoiding huge memory
102 explosions when the label_map gets very large. */
104 rtx
105 get_label_from_map (map, i)
106 struct inline_remap *map;
107 int i;
109 rtx x = map->label_map[i];
111 if (x == NULL_RTX)
112 x = map->label_map[i] = gen_label_rtx ();
114 return x;
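/* Typical use (illustrative note): labels are fetched lazily while copying
   insns, e.g. copy_insn_list below does
       copy = emit_label (get_label_from_map (map, CODE_LABEL_NUMBER (insn)));
   so map entries are created only for labels that are actually reached.  */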
117 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
118 is safe and reasonable to integrate into other functions.
119 Nonzero means value is a warning msgid with a single %s
120 for the function's name. */
122 const char *
123 function_cannot_inline_p (fndecl)
124 register tree fndecl;
126 register rtx insn;
127 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
129   /* For functions marked as inline, increase the maximum size to
130 MAX_INLINE_INSNS (-finline-limit-<n>). For regular functions
131 use the limit given by INTEGRATE_THRESHOLD. */
133 int max_insns = (DECL_INLINE (fndecl))
134 ? (MAX_INLINE_INSNS
135 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
136 : INTEGRATE_THRESHOLD (fndecl);
138 register int ninsns = 0;
139 register tree parms;
141 if (DECL_UNINLINABLE (fndecl))
142 return N_("function cannot be inline");
144 /* No inlines with varargs. */
145 if ((last && TREE_VALUE (last) != void_type_node)
146 || current_function_varargs)
147 return N_("varargs function cannot be inline");
149 if (current_function_calls_alloca)
150 return N_("function using alloca cannot be inline");
152 if (current_function_calls_setjmp)
153 return N_("function using setjmp cannot be inline");
155 if (current_function_calls_eh_return)
156 return N_("function uses __builtin_eh_return");
158 if (current_function_contains_functions)
159 return N_("function with nested functions cannot be inline");
161 if (forced_labels)
162 return
163 N_("function with label addresses used in initializers cannot inline");
165 if (current_function_cannot_inline)
166 return current_function_cannot_inline;
168   /* If it's not even close, don't even look.  */
169 if (get_max_uid () > 3 * max_insns)
170 return N_("function too large to be inline");
172 #if 0
173 /* Don't inline functions which do not specify a function prototype and
174      have a BLKmode argument or take the address of a parameter.  */
175 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
177 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
178 TREE_ADDRESSABLE (parms) = 1;
179 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
180 return N_("no prototype, and parameter address used; cannot be inline");
182 #endif
184 /* We can't inline functions that return structures
185 the old-fashioned PCC way, copying into a static block. */
186 if (current_function_returns_pcc_struct)
187 return N_("inline functions not supported for this return value type");
189 /* We can't inline functions that return structures of varying size. */
190 if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
191 && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
192 return N_("function with varying-size return value cannot be inline");
194 /* Cannot inline a function with a varying size argument or one that
195 receives a transparent union. */
196 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
198 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
199 return N_("function with varying-size parameter cannot be inline");
200 else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
201 && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
202       return N_("function with transparent union parameter cannot be inline");
205 if (get_max_uid () > max_insns)
207 for (ninsns = 0, insn = get_first_nonparm_insn ();
208 insn && ninsns < max_insns;
209 insn = NEXT_INSN (insn))
210 if (INSN_P (insn))
211 ninsns++;
213 if (ninsns >= max_insns)
214 return N_("function too large to be inline");
217 /* We will not inline a function which uses computed goto. The addresses of
218 its local labels, which may be tucked into global storage, are of course
219 not constant across instantiations, which causes unexpected behaviour. */
220 if (current_function_has_computed_jump)
221 return N_("function with computed jump cannot inline");
223 /* We cannot inline a nested function that jumps to a nonlocal label. */
224 if (current_function_has_nonlocal_goto)
225 return N_("function with nonlocal goto cannot be inline");
227 /* We can't inline functions that return a PARALLEL rtx. */
228 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
230 rtx result = DECL_RTL (DECL_RESULT (fndecl));
231 if (GET_CODE (result) == PARALLEL)
232 return N_("inline functions not supported for this return value type");
235 /* If the function has a target specific attribute attached to it,
236      then we assume that we should not inline it.  This can be overridden
237 by the target if it defines FUNCTION_ATTRIBUTE_INLINABLE_P. */
238 if (DECL_MACHINE_ATTRIBUTES (fndecl)
239 && ! FUNCTION_ATTRIBUTE_INLINABLE_P (fndecl))
240 return N_("function with target specific attribute(s) cannot be inlined");
242 return NULL;
245 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
246 Zero for a reg that isn't a parm's home.
247 Only reg numbers less than max_parm_reg are mapped here. */
248 static tree *parmdecl_map;
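/* Illustration (not in the original): if parameter `n' lives in pseudo 42,
   then parmdecl_map[42] is the PARM_DECL for `n'; note_modified_parmregs
   below clears TREE_READONLY on that decl the first time pseudo 42 is
   stored to past the parm-setup insns.  */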
250 /* In save_for_inline, nonzero if past the parm-initialization insns. */
251 static int in_nonparm_insns;
253 /* Subroutine for `save_for_inline'. Performs initialization
254 needed to save FNDECL's insns and info for future inline expansion. */
256 static rtvec
257 initialize_for_inline (fndecl)
258 tree fndecl;
260 int i;
261 rtvec arg_vector;
262 tree parms;
264 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
265 memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
266 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
268 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
269 parms;
270 parms = TREE_CHAIN (parms), i++)
272 rtx p = DECL_RTL (parms);
274 /* If we have (mem (addressof (mem ...))), use the inner MEM since
275 otherwise the copy_rtx call below will not unshare the MEM since
276 it shares ADDRESSOF. */
277 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
278 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
279 p = XEXP (XEXP (p, 0), 0);
281 RTVEC_ELT (arg_vector, i) = p;
283 if (GET_CODE (p) == REG)
284 parmdecl_map[REGNO (p)] = parms;
285 else if (GET_CODE (p) == CONCAT)
287 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
288 rtx pimag = gen_imagpart (GET_MODE (preal), p);
290 if (GET_CODE (preal) == REG)
291 parmdecl_map[REGNO (preal)] = parms;
292 if (GET_CODE (pimag) == REG)
293 parmdecl_map[REGNO (pimag)] = parms;
296 /* This flag is cleared later
297 if the function ever modifies the value of the parm. */
298 TREE_READONLY (parms) = 1;
301 return arg_vector;
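/* Sketch of the result (illustrative, assuming pseudo numbers): for
   `int f (int a, double b)' whose parms live in pseudos 40 and 41, the
   returned arg_vector holds (reg:SI 40) and (reg:DF 41), and
   parmdecl_map[40] / parmdecl_map[41] point at the PARM_DECLs.  */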
304 /* Copy NODE (which must be a DECL; a PARM_DECL or RESULT_DECL is
305    copied as an equivalent VAR_DECL).  The DECL originally was in the
306    FROM_FN, but now it will be in the TO_FN.  */
308 tree
309 copy_decl_for_inlining (decl, from_fn, to_fn)
310 tree decl;
311 tree from_fn;
312 tree to_fn;
314 tree copy;
316 /* Copy the declaration. */
317 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
319 /* For a parameter, we must make an equivalent VAR_DECL, not a
320 new PARM_DECL. */
321 copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
322 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
323 TREE_READONLY (copy) = TREE_READONLY (decl);
324 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
326 else
328 copy = copy_node (decl);
329 if (DECL_LANG_SPECIFIC (copy))
330 copy_lang_decl (copy);
332 /* TREE_ADDRESSABLE isn't used to indicate that a label's
333 address has been taken; it's for internal bookkeeping in
334 expand_goto_internal. */
335 if (TREE_CODE (copy) == LABEL_DECL)
336 TREE_ADDRESSABLE (copy) = 0;
339 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
340 declaration inspired this copy. */
341 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
343 /* The new variable/label has no RTL, yet. */
344 SET_DECL_RTL (copy, NULL_RTX);
346 /* These args would always appear unused, if not for this. */
347 TREE_USED (copy) = 1;
349 /* Set the context for the new declaration. */
350 if (!DECL_CONTEXT (decl))
351 /* Globals stay global. */
353 else if (DECL_CONTEXT (decl) != from_fn)
354 /* Things that weren't in the scope of the function we're inlining
355        from aren't in the scope we're inlining into, either.  */
357 else if (TREE_STATIC (decl))
358     /* Function-scoped static variables should stay in the original
359 function. */
361 else
362 /* Ordinary automatic local variables are now in the scope of the
363 new function. */
364 DECL_CONTEXT (copy) = to_fn;
366 return copy;
369 /* Make the insns and PARM_DECLs of the current function permanent
370 and record other information in DECL_SAVED_INSNS to allow inlining
371 of this function in subsequent calls.
373 This routine need not copy any insns because we are not going
374 to immediately compile the insns in the insn chain. There
375 are two cases when we would compile the insns for FNDECL:
376 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
377 be output at the end of other compilation, because somebody took
378 its address. In the first case, the insns of FNDECL are copied
379 as it is expanded inline, so FNDECL's saved insns are not
380 modified. In the second case, FNDECL is used for the last time,
381 so modifying the rtl is not a problem.
383 We don't have to worry about FNDECL being inline expanded by
384 other functions which are written at the end of compilation
385 because flag_no_inline is turned on when we begin writing
386 functions at the end of compilation. */
388 void
389 save_for_inline (fndecl)
390 tree fndecl;
392 rtx insn;
393 rtvec argvec;
394 rtx first_nonparm_insn;
396 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
397 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
398      Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
399 for the parms, prior to elimination of virtual registers.
400 These values are needed for substituting parms properly. */
402 parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
404 /* Make and emit a return-label if we have not already done so. */
406 if (return_label == 0)
408 return_label = gen_label_rtx ();
409 emit_label (return_label);
412 argvec = initialize_for_inline (fndecl);
414 /* If there are insns that copy parms from the stack into pseudo registers,
415 those insns are not copied. `expand_inline_function' must
416 emit the correct code to handle such things. */
418 insn = get_insns ();
419 if (GET_CODE (insn) != NOTE)
420 abort ();
422 /* Get the insn which signals the end of parameter setup code. */
423 first_nonparm_insn = get_first_nonparm_insn ();
425 /* Now just scan the chain of insns to see what happens to our
426 PARM_DECLs. If a PARM_DECL is used but never modified, we
427 can substitute its rtl directly when expanding inline (and
428 perform constant folding when its incoming value is constant).
429 Otherwise, we have to copy its value into a new register and track
430 the new register's life. */
431 in_nonparm_insns = 0;
432 save_parm_insns (insn, first_nonparm_insn);
434 cfun->inl_max_label_num = max_label_num ();
435 cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
436 cfun->original_arg_vector = argvec;
437 cfun->original_decl_initial = DECL_INITIAL (fndecl);
438 cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
439 DECL_SAVED_INSNS (fndecl) = cfun;
441 /* Clean up. */
442 free (parmdecl_map);
445 /* Scan the chain of insns to see what happens to our PARM_DECLs. If a
446 PARM_DECL is used but never modified, we can substitute its rtl directly
447 when expanding inline (and perform constant folding when its incoming
448 value is constant). Otherwise, we have to copy its value into a new
449 register and track the new register's life. */
451 static void
452 save_parm_insns (insn, first_nonparm_insn)
453 rtx insn;
454 rtx first_nonparm_insn;
456 if (insn == NULL_RTX)
457 return;
459 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
461 if (insn == first_nonparm_insn)
462 in_nonparm_insns = 1;
464 if (INSN_P (insn))
466 /* Record what interesting things happen to our parameters. */
467 note_stores (PATTERN (insn), note_modified_parmregs, NULL);
469 /* If this is a CALL_PLACEHOLDER insn then we need to look into the
470 three attached sequences: normal call, sibling call and tail
471 recursion. */
472 if (GET_CODE (insn) == CALL_INSN
473 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
475 int i;
477 for (i = 0; i < 3; i++)
478 save_parm_insns (XEXP (PATTERN (insn), i),
479 first_nonparm_insn);
485 /* Note whether a parameter is modified or not. */
487 static void
488 note_modified_parmregs (reg, x, data)
489 rtx reg;
490 rtx x ATTRIBUTE_UNUSED;
491 void *data ATTRIBUTE_UNUSED;
493 if (GET_CODE (reg) == REG && in_nonparm_insns
494 && REGNO (reg) < max_parm_reg
495 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
496 && parmdecl_map[REGNO (reg)] != 0)
497 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
500 /* Unfortunately, we need a global copy of const_equiv map for communication
501 with a function called from note_stores. Be *very* careful that this
502 is used properly in the presence of recursion. */
504 varray_type global_const_equiv_varray;
506 #define FIXED_BASE_PLUS_P(X) \
507 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
508 && GET_CODE (XEXP (X, 0)) == REG \
509 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
510 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
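/* E.g. (plus (reg virtual-stack-vars) (const_int 8)) satisfies
   FIXED_BASE_PLUS_P, while a PLUS based on an ordinary pseudo or hard
   register does not.  (Illustrative, not from the original source.)  */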
512 /* Called to set up a mapping for the case where a parameter is in a
513 register. If it is read-only and our argument is a constant, set up the
514 constant equivalence.
516 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
517 if it is a register.
519 Also, don't allow hard registers here; they might not be valid when
520 substituted into insns. */
521 static void
522 process_reg_param (map, loc, copy)
523 struct inline_remap *map;
524 rtx loc, copy;
526 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
527 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
528 && ! REG_USERVAR_P (copy))
529 || (GET_CODE (copy) == REG
530 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
532 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
533 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
534 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
535 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
536 copy = temp;
538 map->reg_map[REGNO (loc)] = copy;
541 /* Compare two BLOCKs for qsort. The key we sort on is the
542 BLOCK_ABSTRACT_ORIGIN of the blocks. */
544 static int
545 compare_blocks (v1, v2)
546 const PTR v1;
547 const PTR v2;
549 tree b1 = *((const tree *) v1);
550 tree b2 = *((const tree *) v2);
552 return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
553 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
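/* Note (added remark): the subtraction above orders blocks by the raw
   pointer values of their BLOCK_ABSTRACT_ORIGINs; any consistent total
   order would do, since find_block below searches with the same key.  */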
556 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
557 an original block; the second to a remapped equivalent. */
559 static int
560 find_block (v1, v2)
561 const PTR v1;
562 const PTR v2;
564 const union tree_node *b1 = (const union tree_node *) v1;
565 tree b2 = *((const tree *) v2);
567 return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
570 /* Integrate the procedure defined by FNDECL. Note that this function
571 may wind up calling itself. Since the static variables are not
572 reentrant, we do not assign them until after the possibility
573 of recursion is eliminated.
575 If IGNORE is nonzero, do not produce a value.
576 Otherwise store the value in TARGET if it is nonzero and that is convenient.
578 Value is:
579 (rtx)-1 if we could not substitute the function
580 0 if we substituted it and it does not produce a value
581 else an rtx for where the value is stored. */
583 rtx
584 expand_inline_function (fndecl, parms, target, ignore, type,
585 structure_value_addr)
586 tree fndecl, parms;
587 rtx target;
588 int ignore;
589 tree type;
590 rtx structure_value_addr;
592 struct function *inlining_previous;
593 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
594 tree formal, actual, block;
595 rtx parm_insns = inl_f->emit->x_first_insn;
596 rtx insns = (inl_f->inl_last_parm_insn
597 ? NEXT_INSN (inl_f->inl_last_parm_insn)
598 : parm_insns);
599 tree *arg_trees;
600 rtx *arg_vals;
601 int max_regno;
602 register int i;
603 int min_labelno = inl_f->emit->x_first_label_num;
604 int max_labelno = inl_f->inl_max_label_num;
605 int nargs;
606 rtx loc;
607 rtx stack_save = 0;
608 rtx temp;
609 struct inline_remap *map = 0;
610 #ifdef HAVE_cc0
611 rtx cc0_insn = 0;
612 #endif
613 rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
614 rtx static_chain_value = 0;
615 int inl_max_uid;
616 int eh_region_offset;
618 /* The pointer used to track the true location of the memory used
619 for MAP->LABEL_MAP. */
620 rtx *real_label_map = 0;
622 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
623 max_regno = inl_f->emit->x_reg_rtx_no + 3;
624 if (max_regno < FIRST_PSEUDO_REGISTER)
625 abort ();
627 /* Pull out the decl for the function definition; fndecl may be a
628 local declaration, which would break DECL_ABSTRACT_ORIGIN. */
629 fndecl = inl_f->decl;
631 nargs = list_length (DECL_ARGUMENTS (fndecl));
633 if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
634 cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
636   /* Check that the parm types match and that sufficient arguments were
637 passed. Since the appropriate conversions or default promotions have
638 already been applied, the machine modes should match exactly. */
640 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
641 formal;
642 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
644 tree arg;
645 enum machine_mode mode;
647 if (actual == 0)
648 return (rtx) (HOST_WIDE_INT) -1;
650 arg = TREE_VALUE (actual);
651 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
653 if (arg == error_mark_node
654 || mode != TYPE_MODE (TREE_TYPE (arg))
655 /* If they are block mode, the types should match exactly.
656 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
657 which could happen if the parameter has incomplete type. */
658 || (mode == BLKmode
659 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
660 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
661 return (rtx) (HOST_WIDE_INT) -1;
664 /* Extra arguments are valid, but will be ignored below, so we must
665 evaluate them here for side-effects. */
666 for (; actual; actual = TREE_CHAIN (actual))
667 expand_expr (TREE_VALUE (actual), const0_rtx,
668 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
670 /* Expand the function arguments. Do this first so that any
671 new registers get created before we allocate the maps. */
673 arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
674 arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
676 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
677 formal;
678 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
680 /* Actual parameter, converted to the type of the argument within the
681 function. */
682 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
683 /* Mode of the variable used within the function. */
684 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
685 int invisiref = 0;
687 arg_trees[i] = arg;
688 loc = RTVEC_ELT (arg_vector, i);
690 /* If this is an object passed by invisible reference, we copy the
691 object into a stack slot and save its address. If this will go
692 into memory, we do nothing now. Otherwise, we just expand the
693 argument. */
694 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
695 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
697 rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);
699 store_expr (arg, stack_slot, 0);
700 arg_vals[i] = XEXP (stack_slot, 0);
701 invisiref = 1;
703 else if (GET_CODE (loc) != MEM)
705 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
707 int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
708 enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));
710 pmode = promote_mode (TREE_TYPE (formal), pmode,
711 &unsignedp, 0);
713 if (GET_MODE (loc) != pmode)
714 abort ();
716 	  /* The mode of LOC and ARG can differ if LOC was a variable
717 that had its mode promoted via PROMOTED_MODE. */
718 arg_vals[i] = convert_modes (pmode,
719 TYPE_MODE (TREE_TYPE (arg)),
720 expand_expr (arg, NULL_RTX, mode,
721 EXPAND_SUM),
722 unsignedp);
724 else
725 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
727 else
728 arg_vals[i] = 0;
730 if (arg_vals[i] != 0
731 && (! TREE_READONLY (formal)
732 /* If the parameter is not read-only, copy our argument through
733 a register. Also, we cannot use ARG_VALS[I] if it overlaps
734 TARGET in any way. In the inline function, they will likely
735 be two different pseudos, and `safe_from_p' will make all
736 sorts of smart assumptions about their not conflicting.
737 But if ARG_VALS[I] overlaps TARGET, these assumptions are
738 wrong, so put ARG_VALS[I] into a fresh register.
739 Don't worry about invisible references, since their stack
740 temps will never overlap the target. */
741 || (target != 0
742 && ! invisiref
743 && (GET_CODE (arg_vals[i]) == REG
744 || GET_CODE (arg_vals[i]) == SUBREG
745 || GET_CODE (arg_vals[i]) == MEM)
746 && reg_overlap_mentioned_p (arg_vals[i], target))
747 /* ??? We must always copy a SUBREG into a REG, because it might
748 get substituted into an address, and not all ports correctly
749 handle SUBREGs in addresses. */
750 || (GET_CODE (arg_vals[i]) == SUBREG)))
751 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
753 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
754 && POINTER_TYPE_P (TREE_TYPE (formal)))
755 mark_reg_pointer (arg_vals[i],
756 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
759 /* Allocate the structures we use to remap things. */
761 map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
762 map->fndecl = fndecl;
764 VARRAY_TREE_INIT (map->block_map, 10, "block_map");
765 map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
767 /* We used to use alloca here, but the size of what it would try to
768 allocate would occasionally cause it to exceed the stack limit and
769 cause unpredictable core dumps. */
770 real_label_map
771 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
772 map->label_map = real_label_map;
773 map->local_return_label = NULL_RTX;
775 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
776 map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
777 map->min_insnno = 0;
778 map->max_insnno = inl_max_uid;
780 map->integrating = 1;
781 map->compare_src = NULL_RTX;
782 map->compare_mode = VOIDmode;
784 /* const_equiv_varray maps pseudos in our routine to constants, so
785 it needs to be large enough for all our pseudos. This is the
786 number we are currently using plus the number in the called
787 routine, plus 15 for each arg, five to compute the virtual frame
788 pointer, and five for the return value. This should be enough
789 for most cases. We do not reference entries outside the range of
790 the map.
792 ??? These numbers are quite arbitrary and were obtained by
793 experimentation. At some point, we should try to allocate the
794      table after all the parameters are set up so we can more accurately
795 estimate the number of pseudos we will need. */
797 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
798 (max_reg_num ()
799 + (max_regno - FIRST_PSEUDO_REGISTER)
800 + 15 * nargs
801 + 10),
802 "expand_inline_function");
803 map->const_age = 0;
805 /* Record the current insn in case we have to set up pointers to frame
806 and argument memory blocks. If there are no insns yet, add a dummy
807 insn that can be used as an insertion point. */
808 map->insns_at_start = get_last_insn ();
809 if (map->insns_at_start == 0)
810 map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);
812 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
813 map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;
815 /* Update the outgoing argument size to allow for those in the inlined
816 function. */
817 if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
818 current_function_outgoing_args_size = inl_f->outgoing_args_size;
820 /* If the inline function needs to make PIC references, that means
821 that this function's PIC offset table must be used. */
822 if (inl_f->uses_pic_offset_table)
823 current_function_uses_pic_offset_table = 1;
825 /* If this function needs a context, set it up. */
826 if (inl_f->needs_context)
827 static_chain_value = lookup_static_chain (fndecl);
829 if (GET_CODE (parm_insns) == NOTE
830 && NOTE_LINE_NUMBER (parm_insns) > 0)
832 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
833 NOTE_LINE_NUMBER (parm_insns));
834 if (note)
835 RTX_INTEGRATED_P (note) = 1;
838 /* Process each argument. For each, set up things so that the function's
839 reference to the argument will refer to the argument being passed.
840 We only replace REG with REG here. Any simplifications are done
841 via const_equiv_map.
843 We make two passes: In the first, we deal with parameters that will
844 be placed into registers, since we need to ensure that the allocated
845 register number fits in const_equiv_map. Then we store all non-register
846 parameters into their memory location. */
848 /* Don't try to free temp stack slots here, because we may put one of the
849 parameters into a temp stack slot. */
851 for (i = 0; i < nargs; i++)
853 rtx copy = arg_vals[i];
855 loc = RTVEC_ELT (arg_vector, i);
857 /* There are three cases, each handled separately. */
858 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
859 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
861 /* This must be an object passed by invisible reference (it could
862 also be a variable-sized object, but we forbid inlining functions
863 with variable-sized arguments). COPY is the address of the
864 actual value (this computation will cause it to be copied). We
865 map that address for the register, noting the actual address as
866 an equivalent in case it can be substituted into the insns. */
868 if (GET_CODE (copy) != REG)
870 temp = copy_addr_to_reg (copy);
871 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
872 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
873 copy = temp;
875 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
877 else if (GET_CODE (loc) == MEM)
879 /* This is the case of a parameter that lives in memory. It
880 will live in the block we allocate in the called routine's
881 frame that simulates the incoming argument area. Do nothing
882 with the parameter now; we will call store_expr later. In
883 this case, however, we must ensure that the virtual stack and
884 incoming arg rtx values are expanded now so that we can be
885 sure we have enough slots in the const equiv map since the
886 store_expr call can easily blow the size estimate. */
887 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
888 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
890 else if (GET_CODE (loc) == REG)
891 process_reg_param (map, loc, copy);
892 else if (GET_CODE (loc) == CONCAT)
894 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
895 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
896 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
897 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
899 process_reg_param (map, locreal, copyreal);
900 process_reg_param (map, locimag, copyimag);
902 else
903 abort ();
906 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
907 specially. This function can be called recursively, so we need to
908 save the previous value. */
909 inlining_previous = inlining;
910 inlining = inl_f;
912 /* Now do the parameters that will be placed in memory. */
914 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
915 formal; formal = TREE_CHAIN (formal), i++)
917 loc = RTVEC_ELT (arg_vector, i);
919 if (GET_CODE (loc) == MEM
920 /* Exclude case handled above. */
921 && ! (GET_CODE (XEXP (loc, 0)) == REG
922 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
924 rtx note = emit_note (DECL_SOURCE_FILE (formal),
925 DECL_SOURCE_LINE (formal));
926 if (note)
927 RTX_INTEGRATED_P (note) = 1;
929 /* Compute the address in the area we reserved and store the
930 value there. */
931 temp = copy_rtx_and_substitute (loc, map, 1);
932 subst_constants (&temp, NULL_RTX, map, 1);
933 apply_change_group ();
934 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
935 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
936 store_expr (arg_trees[i], temp, 0);
940 /* Deal with the places that the function puts its result.
941 We are driven by what is placed into DECL_RESULT.
943      Initially, we assume that we don't have any special handling for
944 REG_FUNCTION_RETURN_VALUE_P. */
946 map->inline_target = 0;
947 loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
948 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
950 if (TYPE_MODE (type) == VOIDmode)
951 /* There is no return value to worry about. */
953 else if (GET_CODE (loc) == MEM)
955 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
957 temp = copy_rtx_and_substitute (loc, map, 1);
958 subst_constants (&temp, NULL_RTX, map, 1);
959 apply_change_group ();
960 target = temp;
962 else
964 if (! structure_value_addr
965 || ! aggregate_value_p (DECL_RESULT (fndecl)))
966 abort ();
968 /* Pass the function the address in which to return a structure
969 value. Note that a constructor can cause someone to call us
970 with STRUCTURE_VALUE_ADDR, but the initialization takes place
971 via the first parameter, rather than the struct return address.
973 We have two cases: If the address is a simple register
974 indirect, use the mapping mechanism to point that register to
975 our structure return address. Otherwise, store the structure
976 return value into the place that it will be referenced from. */
978 if (GET_CODE (XEXP (loc, 0)) == REG)
980 temp = force_operand (structure_value_addr, NULL_RTX);
981 temp = force_reg (Pmode, temp);
982 /* A virtual register might be invalid in an insn, because
983 it can cause trouble in reload. Since we don't have access
984 to the expanders at map translation time, make sure we have
985 a proper register now.
986 If a virtual register is actually valid, cse or combine
987 can put it into the mapped insns. */
988 if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
989 && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
990 temp = copy_to_mode_reg (Pmode, temp);
991 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
993 if (CONSTANT_P (structure_value_addr)
994 || GET_CODE (structure_value_addr) == ADDRESSOF
995 || (GET_CODE (structure_value_addr) == PLUS
996 && (XEXP (structure_value_addr, 0)
997 == virtual_stack_vars_rtx)
998 && (GET_CODE (XEXP (structure_value_addr, 1))
999 == CONST_INT)))
1001 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
1002 CONST_AGE_PARM);
1005 else
1007 temp = copy_rtx_and_substitute (loc, map, 1);
1008 subst_constants (&temp, NULL_RTX, map, 0);
1009 apply_change_group ();
1010 emit_move_insn (temp, structure_value_addr);
1014 else if (ignore)
1015 /* We will ignore the result value, so don't look at its structure.
1016 Note that preparations for an aggregate return value
1017 do need to be made (above) even if it will be ignored. */
1019 else if (GET_CODE (loc) == REG)
1021 /* The function returns an object in a register and we use the return
1022 value. Set up our target for remapping. */
1024       /* Machine mode the function was declared to return.  */
1025 enum machine_mode departing_mode = TYPE_MODE (type);
1026 /* (Possibly wider) machine mode it actually computes
1027 (for the sake of callers that fail to declare it right).
1028 We have to use the mode of the result's RTL, rather than
1029 its type, since expand_function_start may have promoted it. */
1030 enum machine_mode arriving_mode
1031 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1032 rtx reg_to_map;
1034 /* Don't use MEMs as direct targets because on some machines
1035 substituting a MEM for a REG makes invalid insns.
1036 Let the combiner substitute the MEM if that is valid. */
1037 if (target == 0 || GET_CODE (target) != REG
1038 || GET_MODE (target) != departing_mode)
1040 /* Don't make BLKmode registers. If this looks like
1041 a BLKmode object being returned in a register, get
1042 the mode from that, otherwise abort. */
1043 if (departing_mode == BLKmode)
1045 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1047 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1048 arriving_mode = departing_mode;
1050 else
1051 abort ();
1054 target = gen_reg_rtx (departing_mode);
1057 /* If function's value was promoted before return,
1058 avoid machine mode mismatch when we substitute INLINE_TARGET.
1059 But TARGET is what we will return to the caller. */
1060 if (arriving_mode != departing_mode)
1062 /* Avoid creating a paradoxical subreg wider than
1063 BITS_PER_WORD, since that is illegal. */
1064 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1066 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1067 GET_MODE_BITSIZE (arriving_mode)))
1068 /* Maybe could be handled by using convert_move () ? */
1069 abort ();
1070 reg_to_map = gen_reg_rtx (arriving_mode);
1071 target = gen_lowpart (departing_mode, reg_to_map);
1073 else
1074 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1076 else
1077 reg_to_map = target;
1079 /* Usually, the result value is the machine's return register.
1080 Sometimes it may be a pseudo. Handle both cases. */
1081 if (REG_FUNCTION_VALUE_P (loc))
1082 map->inline_target = reg_to_map;
1083 else
1084 map->reg_map[REGNO (loc)] = reg_to_map;
1086 else if (GET_CODE (loc) == CONCAT)
1088 enum machine_mode departing_mode = TYPE_MODE (type);
1089 enum machine_mode arriving_mode
1090 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1092 if (departing_mode != arriving_mode)
1093 abort ();
1094 if (GET_CODE (XEXP (loc, 0)) != REG
1095 || GET_CODE (XEXP (loc, 1)) != REG)
1096 abort ();
1098 /* Don't use MEMs as direct targets because on some machines
1099 substituting a MEM for a REG makes invalid insns.
1100 Let the combiner substitute the MEM if that is valid. */
1101 if (target == 0 || GET_CODE (target) != REG
1102 || GET_MODE (target) != departing_mode)
1103 target = gen_reg_rtx (departing_mode);
1105 if (GET_CODE (target) != CONCAT)
1106 abort ();
1108 map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
1109 map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
1111 else
1112 abort ();
1114 /* Remap the exception handler data pointer from one to the other. */
1115 temp = get_exception_pointer (inl_f);
1116 if (temp)
1117 map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);
1119 /* Initialize label_map. get_label_from_map will actually make
1120 the labels. */
1121 memset ((char *) &map->label_map[min_labelno], 0,
1122 (max_labelno - min_labelno) * sizeof (rtx));
1124 /* Make copies of the decls of the symbols in the inline function, so that
1125 the copies of the variables get declared in the current function. Set
1126      up things so that lookup_static_chain knows to interpret registers
1127 in SAVE_EXPRs for TYPE_SIZEs as local. */
1128 inline_function_decl = fndecl;
1129 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1130 block = integrate_decl_tree (inl_f->original_decl_initial, map);
1131 BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
1132 inline_function_decl = 0;
1134 /* Make a fresh binding contour that we can easily remove. Do this after
1135 expanding our arguments so cleanups are properly scoped. */
1136 expand_start_bindings_and_block (0, block);
1138 /* Sort the block-map so that it will be easy to find remapped
1139 blocks later. */
1140 qsort (&VARRAY_TREE (map->block_map, 0),
1141 map->block_map->elements_used,
1142 sizeof (tree),
1143 compare_blocks);
1145 /* Perform postincrements before actually calling the function. */
1146 emit_queue ();
1148 /* Clean up stack so that variables might have smaller offsets. */
1149 do_pending_stack_adjust ();
1151 /* Save a copy of the location of const_equiv_varray for
1152 mark_stores, called via note_stores. */
1153 global_const_equiv_varray = map->const_equiv_varray;
1155 /* If the called function does an alloca, save and restore the
1156 stack pointer around the call. This saves stack space, but
1157 also is required if this inline is being done between two
1158 pushes. */
1159 if (inl_f->calls_alloca)
1160 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1162 /* Now copy the insns one by one. */
1163 copy_insn_list (insns, map, static_chain_value);
1165 /* Duplicate the EH regions. This will create an offset from the
1166 region numbers in the function we're inlining to the region
1167 numbers in the calling function. This must wait until after
1168 copy_insn_list, as we need the insn map to be complete. */
1169 eh_region_offset = duplicate_eh_regions (inl_f, map);
1171 /* Now copy the REG_NOTES for those insns. */
1172 copy_insn_notes (insns, map, eh_region_offset);
1174 /* If the insn sequence required one, emit the return label. */
1175 if (map->local_return_label)
1176 emit_label (map->local_return_label);
1178 /* Restore the stack pointer if we saved it above. */
1179 if (inl_f->calls_alloca)
1180 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1182 if (! cfun->x_whole_function_mode_p)
1183 /* In statement-at-a-time mode, we just tell the front-end to add
1184 this block to the list of blocks at this binding level. We
1185        can't do it the way it's done for function-at-a-time mode, since the
1186 superblocks have not been created yet. */
1187 insert_block (block);
1188 else
1190 BLOCK_CHAIN (block)
1191 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1192 BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
1195 /* End the scope containing the copied formal parameter variables
1196 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1197 here so that expand_end_bindings will not check for unused
1198 variables. That's already been checked for when the inlined
1199 function was defined. */
1200 expand_end_bindings (NULL_TREE, 1, 1);
1202 /* Must mark the line number note after inlined functions as a repeat, so
1203 that the test coverage code can avoid counting the call twice. This
1204 just tells the code to ignore the immediately following line note, since
1205 there already exists a copy of this note before the expanded inline call.
1206 This line number note is still needed for debugging though, so we can't
1207 delete it. */
1208 if (flag_test_coverage)
1209 emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);
1211 emit_line_note (input_filename, lineno);
1213 /* If the function returns a BLKmode object in a register, copy it
1214 out of the temp register into a BLKmode memory object. */
1215 if (target
1216 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1217 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
1218 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
1220 if (structure_value_addr)
1222 target = gen_rtx_MEM (TYPE_MODE (type),
1223 memory_address (TYPE_MODE (type),
1224 structure_value_addr));
1225 set_mem_attributes (target, type, 1);
1228 /* Make sure we free the things we explicitly allocated with xmalloc. */
1229 if (real_label_map)
1230 free (real_label_map);
1231 VARRAY_FREE (map->const_equiv_varray);
1232 free (map->reg_map);
1233 VARRAY_FREE (map->block_map);
1234 free (map->insn_map);
1235 free (map);
1236 free (arg_vals);
1237 free (arg_trees);
1239 inlining = inlining_previous;
1241 return target;
1244 /* Make copies of each insn in the given list using the mapping
1245 computed in expand_inline_function. This function may call itself for
1246 insns containing sequences.
1248 Copying is done in two passes, first the insns and then their REG_NOTES.
1250 If static_chain_value is non-zero, it represents the context-pointer
1251 register for the function. */
1253 static void
1254 copy_insn_list (insns, map, static_chain_value)
1255 rtx insns;
1256 struct inline_remap *map;
1257 rtx static_chain_value;
1259 register int i;
1260 rtx insn;
1261 rtx temp;
1262 #ifdef HAVE_cc0
1263 rtx cc0_insn = 0;
1264 #endif
1266 /* Copy the insns one by one. Do this in two passes, first the insns and
1267 then their REG_NOTES. */
1269 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1271 for (insn = insns; insn; insn = NEXT_INSN (insn))
1273 rtx copy, pattern, set;
1275 map->orig_asm_operands_vector = 0;
1277 switch (GET_CODE (insn))
1279 case INSN:
1280 pattern = PATTERN (insn);
1281 set = single_set (insn);
1282 copy = 0;
1283 if (GET_CODE (pattern) == USE
1284 && GET_CODE (XEXP (pattern, 0)) == REG
1285 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1286 /* The (USE (REG n)) at return from the function should
1287 be ignored since we are changing (REG n) into
1288 inline_target. */
1289 break;
1291 /* Ignore setting a function value that we don't want to use. */
1292 if (map->inline_target == 0
1293 && set != 0
1294 && GET_CODE (SET_DEST (set)) == REG
1295 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1297 if (volatile_refs_p (SET_SRC (set)))
1299 rtx new_set;
1301 /* If we must not delete the source,
1302 load it into a new temporary. */
1303 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1305 new_set = single_set (copy);
1306 if (new_set == 0)
1307 abort ();
1309 SET_DEST (new_set)
1310 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1312 /* If the source and destination are the same and it
1313 has a note on it, keep the insn. */
1314 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1315 && REG_NOTES (insn) != 0)
1316 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1317 else
1318 break;
1321 /* Similarly if an ignored return value is clobbered. */
1322 else if (map->inline_target == 0
1323 && GET_CODE (pattern) == CLOBBER
1324 && GET_CODE (XEXP (pattern, 0)) == REG
1325 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1326 break;
1328 /* If this is setting the static chain rtx, omit it. */
1329 else if (static_chain_value != 0
1330 && set != 0
1331 && GET_CODE (SET_DEST (set)) == REG
1332 && rtx_equal_p (SET_DEST (set),
1333 static_chain_incoming_rtx))
1334 break;
1336 /* If this is setting the static chain pseudo, set it from
1337 the value we want to give it instead. */
1338 else if (static_chain_value != 0
1339 && set != 0
1340 && rtx_equal_p (SET_SRC (set),
1341 static_chain_incoming_rtx))
1343 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
1345 copy = emit_move_insn (newdest, static_chain_value);
1346 static_chain_value = 0;
1349 /* If this is setting the virtual stack vars register, this must
1350 be the code at the handler for a builtin longjmp. The value
1351 saved in the setjmp buffer will be the address of the frame
1352 we've made for this inlined instance within our frame. But we
1353 know the offset of that value so we can use it to reconstruct
1354 our virtual stack vars register from that value. If we are
1355 copying it from the stack pointer, leave it unchanged. */
1356 else if (set != 0
1357 && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
1359 HOST_WIDE_INT offset;
1360 temp = map->reg_map[REGNO (SET_DEST (set))];
1361 temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1362 REGNO (temp)).rtx;
1364 if (rtx_equal_p (temp, virtual_stack_vars_rtx))
1365 offset = 0;
1366 else if (GET_CODE (temp) == PLUS
1367 && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
1368 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1369 offset = INTVAL (XEXP (temp, 1));
1370 else
1371 abort ();
1373 if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
1374 temp = SET_SRC (set);
1375 else
1376 temp = force_operand (plus_constant (SET_SRC (set),
1377 - offset),
1378 NULL_RTX);
1380 copy = emit_move_insn (virtual_stack_vars_rtx, temp);
1383 else
1384 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1385 /* REG_NOTES will be copied later. */
1387 #ifdef HAVE_cc0
1388 /* If this insn is setting CC0, it may need to look at
1389 the insn that uses CC0 to see what type of insn it is.
1390 In that case, the call to recog via validate_change will
1391 fail. So don't substitute constants here. Instead,
1392 do it when we emit the following insn.
1394 For example, see the pyr.md file. That machine has signed and
1395 unsigned compares. The compare patterns must check the
1396 	     following branch insn to see what kind of compare to
1397 emit.
1399 If the previous insn set CC0, substitute constants on it as
1400 well. */
1401 if (sets_cc0_p (PATTERN (copy)) != 0)
1402 cc0_insn = copy;
1403 else
1405 if (cc0_insn)
1406 try_constants (cc0_insn, map);
1407 cc0_insn = 0;
1408 try_constants (copy, map);
1410 #else
1411 try_constants (copy, map);
1412 #endif
1413 break;
1415 case JUMP_INSN:
1416 if (map->integrating && returnjump_p (insn))
1418 if (map->local_return_label == 0)
1419 map->local_return_label = gen_label_rtx ();
1420 pattern = gen_jump (map->local_return_label);
1422 else
1423 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1425 copy = emit_jump_insn (pattern);
1427 #ifdef HAVE_cc0
1428 if (cc0_insn)
1429 try_constants (cc0_insn, map);
1430 cc0_insn = 0;
1431 #endif
1432 try_constants (copy, map);
1434 	  /* If this used to be a conditional jump insn whose branch
1435 	     direction is now known, we must do something special.  */
1436 if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
1438 #ifdef HAVE_cc0
1439 /* If the previous insn set cc0 for us, delete it. */
1440 if (sets_cc0_p (PREV_INSN (copy)))
1441 delete_insn (PREV_INSN (copy));
1442 #endif
1444 /* If this is now a no-op, delete it. */
1445 if (map->last_pc_value == pc_rtx)
1447 delete_insn (copy);
1448 copy = 0;
1450 else
1451 	      /* Otherwise, this is an unconditional jump so we must put a
1452 BARRIER after it. We could do some dead code elimination
1453 here, but jump.c will do it just as well. */
1454 emit_barrier ();
1456 break;
1458 case CALL_INSN:
1459 /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1460 three attached sequences: normal call, sibling call and tail
1461 recursion. */
1462 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1464 rtx sequence[3];
1465 rtx tail_label;
1467 for (i = 0; i < 3; i++)
1469 rtx seq;
1471 sequence[i] = NULL_RTX;
1472 seq = XEXP (PATTERN (insn), i);
1473 if (seq)
1475 start_sequence ();
1476 copy_insn_list (seq, map, static_chain_value);
1477 sequence[i] = get_insns ();
1478 end_sequence ();
1482 /* Find the new tail recursion label.
1483 It will already be substituted into sequence[2]. */
1484 tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
1485 map, 0);
1487 copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
1488 sequence[0],
1489 sequence[1],
1490 sequence[2],
1491 tail_label));
1492 break;
1495 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1496 copy = emit_call_insn (pattern);
1498 SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
1499 CONST_CALL_P (copy) = CONST_CALL_P (insn);
1501 /* Because the USAGE information potentially contains objects other
1502 than hard registers, we need to copy it. */
1504 CALL_INSN_FUNCTION_USAGE (copy)
1505 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1506 map, 0);
1508 #ifdef HAVE_cc0
1509 if (cc0_insn)
1510 try_constants (cc0_insn, map);
1511 cc0_insn = 0;
1512 #endif
1513 try_constants (copy, map);
1515 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1516 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1517 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
1518 break;
1520 case CODE_LABEL:
1521 copy = emit_label (get_label_from_map (map,
1522 CODE_LABEL_NUMBER (insn)));
1523 LABEL_NAME (copy) = LABEL_NAME (insn);
1524 map->const_age++;
1525 break;
1527 case BARRIER:
1528 copy = emit_barrier ();
1529 break;
1531 case NOTE:
1532 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1533 discarded because it is important to have only one of
1534 each in the current function.
1536 NOTE_INSN_DELETED notes aren't useful.
1538 NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
1539 pointer (which will soon be dangling) confuses flow's
1540 attempts to preserve bb structures during the compilation
1541 of a function. */
1543 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1544 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1545 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
1546 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
1548 copy = emit_note (NOTE_SOURCE_FILE (insn),
1549 NOTE_LINE_NUMBER (insn));
1550 if (copy
1551 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1552 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1553 && NOTE_BLOCK (insn))
1555 tree *mapped_block_p;
1557 mapped_block_p
1558 = (tree *) bsearch (NOTE_BLOCK (insn),
1559 &VARRAY_TREE (map->block_map, 0),
1560 map->block_map->elements_used,
1561 sizeof (tree),
1562 find_block);
1564 if (!mapped_block_p)
1565 abort ();
1566 else
1567 NOTE_BLOCK (copy) = *mapped_block_p;
1569 else if (copy
1570 && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1571 NOTE_EXPECTED_VALUE (copy)
1572 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1573 map, 0);
1575 else
1576 copy = 0;
1577 break;
1579 default:
1580 abort ();
1583 if (copy)
1584 RTX_INTEGRATED_P (copy) = 1;
1586 map->insn_map[INSN_UID (insn)] = copy;
1590 /* Copy the REG_NOTES. Increment const_age, so that only constants
1591 from parameters can be substituted in. These are the only ones
1592 that are valid across the entire function. */
1594 static void
1595 copy_insn_notes (insns, map, eh_region_offset)
1596 rtx insns;
1597 struct inline_remap *map;
1598 int eh_region_offset;
1600 rtx insn, new_insn;
1602 map->const_age++;
1603 for (insn = insns; insn; insn = NEXT_INSN (insn))
1605 if (! INSN_P (insn))
1606 continue;
1608 new_insn = map->insn_map[INSN_UID (insn)];
1609 if (! new_insn)
1610 continue;
1612 if (REG_NOTES (insn))
1614 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1616 /* We must also do subst_constants, in case one of our parameters
1617 has const type and constant value. */
1618 subst_constants (&note, NULL_RTX, map, 0);
1619 apply_change_group ();
1620 REG_NOTES (new_insn) = note;
1622 /* Delete any REG_LABEL notes from the chain. Remap any
1623 REG_EH_REGION notes. */
1624 for (; note; note = next)
1626 next = XEXP (note, 1);
1627 if (REG_NOTE_KIND (note) == REG_LABEL)
1628 remove_note (new_insn, note);
1629 else if (REG_NOTE_KIND (note) == REG_EH_REGION)
1630 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1631 + eh_region_offset);
1635 if (GET_CODE (insn) == CALL_INSN
1636 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1638 int i;
1639 for (i = 0; i < 3; i++)
1640 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1643 if (GET_CODE (insn) == JUMP_INSN
1644 && GET_CODE (PATTERN (insn)) == RESX)
1645 XINT (PATTERN (new_insn), 0) += eh_region_offset;
1649 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1650 push all of those decls and give each one the corresponding home. */
1652 static void
1653 integrate_parm_decls (args, map, arg_vector)
1654 tree args;
1655 struct inline_remap *map;
1656 rtvec arg_vector;
1658 register tree tail;
1659 register int i;
1661 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1663 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1664 current_function_decl);
1665 rtx new_decl_rtl
1666 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1668 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1669 here, but that's going to require some more work. */
1670 /* DECL_INCOMING_RTL (decl) = ?; */
1671 /* Fully instantiate the address with the equivalent form so that the
1672 debugging information contains the actual register, instead of the
1673 virtual register. Do this by not passing an insn to
1674 subst_constants. */
1675 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1676 apply_change_group ();
1677 SET_DECL_RTL (decl, new_decl_rtl);
1681 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1682 current function a tree of contexts isomorphic to the one that is given.
1684 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1685 registers used in the DECL_RTL field should be remapped. If it is zero,
1686 no mapping is necessary. */
1688 static tree
1689 integrate_decl_tree (let, map)
1690 tree let;
1691 struct inline_remap *map;
1693 tree t;
1694 tree new_block;
1695 tree *next;
1697 new_block = make_node (BLOCK);
1698 VARRAY_PUSH_TREE (map->block_map, new_block);
1699 next = &BLOCK_VARS (new_block);
1701 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1703 tree d;
1705 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1707 if (DECL_RTL_SET_P (t))
1709 rtx r;
1711 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1713 /* Fully instantiate the address with the equivalent form so that the
1714 debugging information contains the actual register, instead of the
1715 virtual register. Do this by not passing an insn to
1716 subst_constants. */
1717 r = DECL_RTL (d);
1718 subst_constants (&r, NULL_RTX, map, 1);
1719 SET_DECL_RTL (d, r);
1720 apply_change_group ();
1723 /* Add this declaration to the list of variables in the new
1724 block. */
1725 *next = d;
1726 next = &TREE_CHAIN (d);
1729 next = &BLOCK_SUBBLOCKS (new_block);
1730 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1732 *next = integrate_decl_tree (t, map);
1733 BLOCK_SUPERCONTEXT (*next) = new_block;
1734 next = &BLOCK_CHAIN (*next);
1737 TREE_USED (new_block) = TREE_USED (let);
1738 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1740 return new_block;
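/* Illustrative sketch (not GCC code): integrate_decl_tree appends to
   each new chain through a `tree *next' tail pointer, so members come
   out in their original order without re-walking the list.  The toy
   block tree below, with hypothetical types, uses the same idiom.
   Error checking on allocation is omitted.  */

#include <stdlib.h>

struct block
{
  struct block *subblocks;	/* First child.  */
  struct block *chain;		/* Next sibling.  */
  const struct block *origin;	/* Block this one was copied from.  */
};

/* Recursively copy LET, keeping children in order by always appending
   through NEXT, the way the function above chains BLOCK_VARS and
   BLOCK_SUBBLOCKS.  */
static struct block *
copy_block_tree (const struct block *let)
{
  struct block *new_block = calloc (1, sizeof *new_block);
  struct block **next = &new_block->subblocks;
  const struct block *t;

  for (t = let->subblocks; t; t = t->chain)
    {
      *next = copy_block_tree (t);
      next = &(*next)->chain;
    }
  new_block->origin = let;
  return new_block;
}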
1743 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1744 except for those few rtx codes that are sharable.
1746 We always return an rtx that is similar to the incoming rtx, with the
1747 exception of possibly changing a REG to a SUBREG or vice versa. No
1748 rtl is ever emitted.
1750 If FOR_LHS is nonzero, it means we are processing something that will
1751 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1752 inlining since we need to be conservative in how it is set for
1753 such cases.
1755 Handle constants that need to be placed in the constant pool by
1756 calling `force_const_mem'. */
1758 rtx
1759 copy_rtx_and_substitute (orig, map, for_lhs)
1760 register rtx orig;
1761 struct inline_remap *map;
1762 int for_lhs;
1764 register rtx copy, temp;
1765 register int i, j;
1766 register RTX_CODE code;
1767 register enum machine_mode mode;
1768 register const char *format_ptr;
1769 int regno;
1771 if (orig == 0)
1772 return 0;
1774 code = GET_CODE (orig);
1775 mode = GET_MODE (orig);
1777 switch (code)
1779 case REG:
1780 /* If the stack pointer register shows up, it must be part of
1781 stack-adjustments (*not* because we eliminated the frame pointer!).
1782 Small hard registers are returned as-is. Pseudo-registers
1783 go through their `reg_map'. */
1784 regno = REGNO (orig);
1785 if (regno <= LAST_VIRTUAL_REGISTER
1786 || (map->integrating
1787 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1789 /* Some hard registers are also mapped,
1790 but others are not translated. */
1791 if (map->reg_map[regno] != 0)
1792 return map->reg_map[regno];
1794 /* If this is the virtual frame pointer, make space in current
1795 function's stack frame for the stack frame of the inline function.
1797 Copy the address of this area into a pseudo. Map
1798 virtual_stack_vars_rtx to this pseudo and set up a constant
1799 equivalence for it to be the address. This will substitute the
1800 address into insns where it can be substituted and use the new
1801 pseudo where it can't. */
1802 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1804 rtx loc, seq;
1805 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1806 #ifdef FRAME_GROWS_DOWNWARD
1807 int alignment
1808 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1809 / BITS_PER_UNIT);
1811 /* In this case, virtual_stack_vars_rtx points to one byte
1812 higher than the top of the frame area. So make sure we
1813 allocate a big enough chunk to keep the frame pointer
1814 aligned like a real one. */
1815 if (alignment)
1816 size = CEIL_ROUND (size, alignment);
1817 #endif
1818 start_sequence ();
1819 loc = assign_stack_temp (BLKmode, size, 1);
1820 loc = XEXP (loc, 0);
1821 #ifdef FRAME_GROWS_DOWNWARD
1822 /* In this case, virtual_stack_vars_rtx points to one byte
1823 higher than the top of the frame area. So compute the offset
1824 to one byte higher than our substitute frame. */
1825 loc = plus_constant (loc, size);
1826 #endif
1827 map->reg_map[regno] = temp
1828 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1830 #ifdef STACK_BOUNDARY
1831 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1832 #endif
1834 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1836 seq = gen_sequence ();
1837 end_sequence ();
1838 emit_insn_after (seq, map->insns_at_start);
1839 return temp;
1841 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1842 || (map->integrating
1843 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1844 == orig)))
1846 /* Do the same for a block to contain any arguments referenced
1847 in memory. */
1848 rtx loc, seq;
1849 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1851 start_sequence ();
1852 loc = assign_stack_temp (BLKmode, size, 1);
1853 loc = XEXP (loc, 0);
1854 /* When arguments grow downward, the virtual incoming
1855 args pointer points to the top of the argument block,
1856 so the remapped location better do the same. */
1857 #ifdef ARGS_GROW_DOWNWARD
1858 loc = plus_constant (loc, size);
1859 #endif
1860 map->reg_map[regno] = temp
1861 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1863 #ifdef STACK_BOUNDARY
1864 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1865 #endif
1867 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1869 seq = gen_sequence ();
1870 end_sequence ();
1871 emit_insn_after (seq, map->insns_at_start);
1872 return temp;
1874 else if (REG_FUNCTION_VALUE_P (orig))
1876 /* This is a reference to the function return value. If
1877 the function doesn't have a return value, error. If the
1878 mode doesn't agree, and it isn't BLKmode, make a SUBREG. */
1879 if (map->inline_target == 0)
1881 if (rtx_equal_function_value_matters)
1882 /* This is an ignored return value. We must not
1883 leave it in with REG_FUNCTION_VALUE_P set, since
1884 that would confuse subsequent inlining of the
1885 current function into a later function. */
1886 return gen_rtx_REG (GET_MODE (orig), regno);
1887 else
1888 /* Must be unrolling loops or replicating code if we
1889 reach here, so return the register unchanged. */
1890 return orig;
1892 else if (GET_MODE (map->inline_target) != BLKmode
1893 && mode != GET_MODE (map->inline_target))
1894 return gen_lowpart (mode, map->inline_target);
1895 else
1896 return map->inline_target;
1898 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
1899 /* If leaf_renumber_regs_insn() might remap this register to
1900 some other number, make sure we don't share it with the
1901 inlined function, otherwise delayed optimization of the
1902 inlined function may change it in place, breaking our
1903 reference to it. We may still share it within the
1904 function, so create an entry for this register in the
1905 reg_map. */
1906 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
1907 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
1909 if (!map->leaf_reg_map[regno][mode])
1910 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
1911 return map->leaf_reg_map[regno][mode];
1913 #endif
1914 else
1915 return orig;
1917 abort ();
1919 if (map->reg_map[regno] == NULL)
1921 map->reg_map[regno] = gen_reg_rtx (mode);
1922 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1923 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1924 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1925 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1927 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1928 mark_reg_pointer (map->reg_map[regno],
1929 map->regno_pointer_align[regno]);
1931 return map->reg_map[regno];
1933 case SUBREG:
1934 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1935 return simplify_gen_subreg (GET_MODE (orig), copy,
1936 GET_MODE (SUBREG_REG (orig)),
1937 SUBREG_BYTE (orig));
1939 case ADDRESSOF:
1940 copy = gen_rtx_ADDRESSOF (mode,
1941 copy_rtx_and_substitute (XEXP (orig, 0),
1942 map, for_lhs),
1943 0, ADDRESSOF_DECL (orig));
1944 regno = ADDRESSOF_REGNO (orig);
1945 if (map->reg_map[regno])
1946 regno = REGNO (map->reg_map[regno]);
1947 else if (regno > LAST_VIRTUAL_REGISTER)
1949 temp = XEXP (orig, 0);
1950 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
1951 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
1952 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
1953 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
1954 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1956 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1957 mark_reg_pointer (map->reg_map[regno],
1958 map->regno_pointer_align[regno]);
1959 regno = REGNO (map->reg_map[regno]);
1961 ADDRESSOF_REGNO (copy) = regno;
1962 return copy;
1964 case USE:
1965 case CLOBBER:
1966 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1967 to (use foo) if the original insn didn't have a subreg.
1968 Removing the subreg distorts the VAX movstrhi pattern
1969 by changing the mode of an operand. */
1970 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
1971 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1972 copy = SUBREG_REG (copy);
1973 return gen_rtx_fmt_e (code, VOIDmode, copy);
1975 case CODE_LABEL:
1976 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1977 = LABEL_PRESERVE_P (orig);
1978 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
1980 /* We need to handle "deleted" labels that appear in the DECL_RTL
1981 of a LABEL_DECL. */
1982 case NOTE:
1983 if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
1984 return map->insn_map[INSN_UID (orig)];
1985 break;
1987 case LABEL_REF:
1988 copy
1989 = gen_rtx_LABEL_REF
1990 (mode,
1991 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1992 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
1994 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1996 /* The fact that this label was previously nonlocal does not mean
1997 it still is, so we must check if it is within the range of
1998 this function's labels. */
1999 LABEL_REF_NONLOCAL_P (copy)
2000 = (LABEL_REF_NONLOCAL_P (orig)
2001 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2002 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2004 /* If we have made a nonlocal label local, it means that this
2005 inlined call will be referring to our nonlocal goto handler.
2006 So make sure we create one for this block; we normally would
2007 not since this is not otherwise considered a "call". */
2008 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2009 function_call_count++;
2011 return copy;
2013 case PC:
2014 case CC0:
2015 case CONST_INT:
2016 return orig;
2018 case SYMBOL_REF:
2019 /* Symbols which represent the address of a label stored in the constant
2020 pool must be modified to point to a constant pool entry for the
2021 remapped label. Otherwise, symbols are returned unchanged. */
2022 if (CONSTANT_POOL_ADDRESS_P (orig))
2024 struct function *f = inlining ? inlining : cfun;
2025 rtx constant = get_pool_constant_for_function (f, orig);
2026 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2027 if (inlining)
2029 rtx temp = force_const_mem (const_mode,
2030 copy_rtx_and_substitute (constant,
2031 map, 0));
2033 #if 0
2034 /* Legitimizing the address here is incorrect.
2036 Since we had a SYMBOL_REF before, we can assume it is valid
2037 to have one in this position in the insn.
2039 Also, change_address may create new registers. These
2040 registers will not have valid reg_map entries. This can
2041 cause try_constants() to fail because it assumes that all
2042 registers in the rtx have valid reg_map entries, and it may
2043 end up replacing one of these new registers with junk. */
2045 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2046 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2047 #endif
2049 temp = XEXP (temp, 0);
2051 #ifdef POINTERS_EXTEND_UNSIGNED
2052 if (GET_MODE (temp) != GET_MODE (orig))
2053 temp = convert_memory_address (GET_MODE (orig), temp);
2054 #endif
2055 return temp;
2057 else if (GET_CODE (constant) == LABEL_REF)
2058 return XEXP (force_const_mem
2059 (GET_MODE (orig),
2060 copy_rtx_and_substitute (constant, map, for_lhs)),
2061 0);
2064 return orig;
2066 case CONST_DOUBLE:
2067 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2068 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2069 duplicate of a CONST_DOUBLE we have already seen. */
2070 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2072 REAL_VALUE_TYPE d;
2074 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2075 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2077 else
2078 return immed_double_const (CONST_DOUBLE_LOW (orig),
2079 CONST_DOUBLE_HIGH (orig), VOIDmode);
2081 case CONST:
2082 /* Make a new constant pool entry for a constant
2083 that was in the pool of the inline function. */
2084 if (RTX_INTEGRATED_P (orig))
2085 abort ();
2086 break;
2088 case ASM_OPERANDS:
2089 /* If a single asm insn contains multiple output operands then
2090 it contains multiple ASM_OPERANDS rtx's that share the input
2091 and constraint vecs. We must make sure that the copied insn
2092 continues to share them. */
2093 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2095 copy = rtx_alloc (ASM_OPERANDS);
2096 copy->volatil = orig->volatil;
2097 PUT_MODE (copy, GET_MODE (orig));
2098 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2099 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2100 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2101 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2102 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2103 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2104 = map->copy_asm_constraints_vector;
2105 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2106 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2107 return copy;
2109 break;
2111 case CALL:
2112 /* This is given special treatment because the first
2113 operand of a CALL is a (MEM ...) which may get
2114 forced into a register for cse. This is undesirable
2115 if function-address cse isn't wanted or if we won't do cse. */
2116 #ifndef NO_FUNCTION_CSE
2117 if (! (optimize && ! flag_no_function_cse))
2118 #endif
2119 return
2120 gen_rtx_CALL
2121 (GET_MODE (orig),
2122 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2123 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2124 map, 0)),
2125 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2126 break;
2128 #if 0
2129 /* Must be ifdefed out for loop unrolling to work. */
2130 case RETURN:
2131 abort ();
2132 #endif
2134 case SET:
2135 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2136 Adjust the setting by the offset of the area we made.
2137 If the nonlocal goto is into the current function,
2138 this will result in unnecessarily bad code, but should work. */
2139 if (SET_DEST (orig) == virtual_stack_vars_rtx
2140 || SET_DEST (orig) == virtual_incoming_args_rtx)
2142 /* In case a translation hasn't occurred already, make one now. */
2143 rtx equiv_reg;
2144 rtx equiv_loc;
2145 HOST_WIDE_INT loc_offset;
2147 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2148 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2149 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2150 REGNO (equiv_reg)).rtx;
2151 loc_offset
2152 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2154 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2155 force_operand
2156 (plus_constant
2157 (copy_rtx_and_substitute (SET_SRC (orig),
2158 map, 0),
2159 - loc_offset),
2160 NULL_RTX));
2162 else
2163 return gen_rtx_SET (VOIDmode,
2164 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2165 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2166 break;
2168 case MEM:
2169 if (inlining
2170 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2171 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2173 enum machine_mode const_mode
2174 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2175 rtx constant
2176 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2178 constant = copy_rtx_and_substitute (constant, map, 0);
2180 /* If this was an address of a constant pool entry that itself
2181 had to be placed in the constant pool, it might not be a
2182 valid address. So the recursive call might have turned it
2183 into a register. In that case, it isn't a constant any
2184 more, so return it. This has the potential of changing a
2185 MEM into a REG, but we'll assume that it is safe. */
2186 if (! CONSTANT_P (constant))
2187 return constant;
2189 return validize_mem (force_const_mem (const_mode, constant));
2192 copy = rtx_alloc (MEM);
2193 PUT_MODE (copy, mode);
2194 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
2195 MEM_COPY_ATTRIBUTES (copy, orig);
2196 return copy;
2198 default:
2199 break;
2202 copy = rtx_alloc (code);
2203 PUT_MODE (copy, mode);
2204 copy->in_struct = orig->in_struct;
2205 copy->volatil = orig->volatil;
2206 copy->unchanging = orig->unchanging;
2208 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2210 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2212 switch (*format_ptr++)
2214 case '0':
2215 /* Copy this through the wide int field; that's safest. */
2216 X0WINT (copy, i) = X0WINT (orig, i);
2217 break;
2219 case 'e':
2220 XEXP (copy, i)
2221 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2222 break;
2224 case 'u':
2225 /* Change any references to old-insns to point to the
2226 corresponding copied insns. */
2227 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2228 break;
2230 case 'E':
2231 XVEC (copy, i) = XVEC (orig, i);
2232 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2234 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2235 for (j = 0; j < XVECLEN (copy, i); j++)
2236 XVECEXP (copy, i, j)
2237 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2238 map, for_lhs);
2240 break;
2242 case 'w':
2243 XWINT (copy, i) = XWINT (orig, i);
2244 break;
2246 case 'i':
2247 XINT (copy, i) = XINT (orig, i);
2248 break;
2250 case 's':
2251 XSTR (copy, i) = XSTR (orig, i);
2252 break;
2254 case 't':
2255 XTREE (copy, i) = XTREE (orig, i);
2256 break;
2258 default:
2259 abort ();
2263 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2265 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2266 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2267 map->copy_asm_constraints_vector
2268 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2271 return copy;
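/* Illustrative sketch (not GCC code): the generic copy above is driven
   by GET_RTX_FORMAT -- one format character per operand says how that
   slot must be copied.  The toy expression copier below dispatches the
   same way; the node layout and the format letters used here are
   hypothetical.  */

#include <stdlib.h>
#include <string.h>

struct expr
{
  const char *format;		/* One char per operand: 'e' or 'i'.  */
  int nops;
  union { struct expr *e; int i; } op[4];
};

/* Copy X recursively, dispatching on the per-operand format string the
   way the fallthrough loop above dispatches on GET_RTX_FORMAT.  */
static struct expr *
copy_expr (const struct expr *x)
{
  struct expr *copy = malloc (sizeof *copy);
  int i;

  memcpy (copy, x, sizeof *copy);
  for (i = 0; i < x->nops; i++)
    switch (x->format[i])
      {
      case 'e':			/* Subexpression: copy recursively.  */
	copy->op[i].e = copy_expr (x->op[i].e);
	break;
      case 'i':			/* Plain integer: already copied.  */
	break;
      default:
	abort ();
      }
  return copy;
}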
2274 /* Substitute known constant values into INSN, if that is valid. */
2276 void
2277 try_constants (insn, map)
2278 rtx insn;
2279 struct inline_remap *map;
2281 int i;
2283 map->num_sets = 0;
2285 /* First try just updating addresses, then other things. This is
2286 important when we have something like the store of a constant
2287 into memory and we can update the memory address but the machine
2288 does not support a constant source. */
2289 subst_constants (&PATTERN (insn), insn, map, 1);
2290 apply_change_group ();
2291 subst_constants (&PATTERN (insn), insn, map, 0);
2292 apply_change_group ();
2294 /* Show we don't know the value of anything stored or clobbered. */
2295 note_stores (PATTERN (insn), mark_stores, NULL);
2296 map->last_pc_value = 0;
2297 #ifdef HAVE_cc0
2298 map->last_cc0_value = 0;
2299 #endif
2301 /* Set up any constant equivalences made in this insn. */
2302 for (i = 0; i < map->num_sets; i++)
2304 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2306 int regno = REGNO (map->equiv_sets[i].dest);
2308 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2309 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2310 /* The following clause is a hack to make the case work where GNU C++
2311 reassigns a variable to make cse work right. */
2312 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2313 regno).rtx,
2314 map->equiv_sets[i].equiv))
2315 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2316 map->equiv_sets[i].equiv, map->const_age);
2318 else if (map->equiv_sets[i].dest == pc_rtx)
2319 map->last_pc_value = map->equiv_sets[i].equiv;
2320 #ifdef HAVE_cc0
2321 else if (map->equiv_sets[i].dest == cc0_rtx)
2322 map->last_cc0_value = map->equiv_sets[i].equiv;
2323 #endif
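/* Illustrative sketch (not GCC code): try_constants relies on the
   validate_change/apply_change_group protocol -- substitutions are
   queued tentatively and either committed as a group or rolled back.
   The toy change buffer below shows only the shape of that protocol;
   the real one in recog.c also re-recognizes the changed insn before
   committing.  */

#include <stdlib.h>

#define MAX_CHANGES 16

struct change { int *loc; int old; };

static struct change change_queue[MAX_CHANGES];
static int num_changes;

/* Tentatively store NEW_VAL at LOC, remembering the old value so the
   change can be undone, as validate_change does.  */
static void
queue_change (int *loc, int new_val)
{
  if (num_changes == MAX_CHANGES)
    abort ();
  change_queue[num_changes].loc = loc;
  change_queue[num_changes].old = *loc;
  num_changes++;
  *loc = new_val;
}

/* Commit everything queued so far, as apply_change_group does once the
   result passes validation.  */
static void
commit_changes (void)
{
  num_changes = 0;
}

/* Undo every change queued after mark NUM, as cancel_changes does.  */
static void
rollback_changes (int num)
{
  while (num_changes > num)
    {
      num_changes--;
      *change_queue[num_changes].loc = change_queue[num_changes].old;
    }
}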
2327 /* Substitute known constants for pseudo regs in the contents of LOC,
2328 which are part of INSN.
2329 If INSN is zero, the substitution should always be done (this is used to
2330 update DECL_RTL).
2331 These changes are taken out by try_constants if the result is not valid.
2333 Note that we are more concerned with determining when the result of a SET
2334 is a constant, for further propagation, than actually inserting constants
2335 into insns; cse will do the latter task better.
2337 This function is also used to adjust addresses of items previously addressed
2338 via the virtual stack variable or virtual incoming arguments registers.
2340 If MEMONLY is nonzero, only make changes inside a MEM. */
2342 static void
2343 subst_constants (loc, insn, map, memonly)
2344 rtx *loc;
2345 rtx insn;
2346 struct inline_remap *map;
2347 int memonly;
2349 rtx x = *loc;
2350 register int i, j;
2351 register enum rtx_code code;
2352 register const char *format_ptr;
2353 int num_changes = num_validated_changes ();
2354 rtx new = 0;
2355 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2357 code = GET_CODE (x);
2359 switch (code)
2361 case PC:
2362 case CONST_INT:
2363 case CONST_DOUBLE:
2364 case SYMBOL_REF:
2365 case CONST:
2366 case LABEL_REF:
2367 case ADDRESS:
2368 return;
2370 #ifdef HAVE_cc0
2371 case CC0:
2372 if (! memonly)
2373 validate_change (insn, loc, map->last_cc0_value, 1);
2374 return;
2375 #endif
2377 case USE:
2378 case CLOBBER:
2379 /* The only thing we can do with a USE or CLOBBER is possibly do
2380 some substitutions in a MEM within it. */
2381 if (GET_CODE (XEXP (x, 0)) == MEM)
2382 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2383 return;
2385 case REG:
2386 /* Substitute for parms and known constants. Don't replace
2387 hard regs used as user variables with constants. */
2388 if (! memonly)
2390 int regno = REGNO (x);
2391 struct const_equiv_data *p;
2393 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2394 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2395 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2396 p->rtx != 0)
2397 && p->age >= map->const_age)
2398 validate_change (insn, loc, p->rtx, 1);
2400 return;
2402 case SUBREG:
2403 /* SUBREG applied to something other than a reg
2404 should be treated as ordinary, since that must
2405 be a special hack and we don't know how to treat it specially.
2406 Consider for example mulsidi3 in m68k.md.
2407 Ordinary SUBREG of a REG needs this special treatment. */
2408 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2410 rtx inner = SUBREG_REG (x);
2411 rtx new = 0;
2413 /* We can't call subst_constants on &SUBREG_REG (x) because any
2414 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2415 see what is inside, try to form the new SUBREG and see if that is
2416 valid. We handle two cases: extracting a full word in an
2417 integral mode and extracting the low part. */
2418 subst_constants (&inner, NULL_RTX, map, 0);
2419 new = simplify_gen_subreg (GET_MODE (x), inner,
2420 GET_MODE (SUBREG_REG (x)),
2421 SUBREG_BYTE (x));
2423 if (new)
2424 validate_change (insn, loc, new, 1);
2425 else
2426 cancel_changes (num_changes);
2428 return;
2430 break;
2432 case MEM:
2433 subst_constants (&XEXP (x, 0), insn, map, 0);
2435 /* If a memory address got spoiled, change it back. */
2436 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2437 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2438 cancel_changes (num_changes);
2439 return;
2441 case SET:
2443 /* Substitute constants in our source, and in any arguments to a
2444 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2445 itself. */
2446 rtx *dest_loc = &SET_DEST (x);
2447 rtx dest = *dest_loc;
2448 rtx src, tem;
2449 enum machine_mode compare_mode = VOIDmode;
2451 /* If SET_SRC is a COMPARE which subst_constants would turn into
2452 COMPARE of 2 VOIDmode constants, note the mode in which the comparison
2453 is to be done. */
2454 if (GET_CODE (SET_SRC (x)) == COMPARE)
2456 src = SET_SRC (x);
2457 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2458 #ifdef HAVE_cc0
2459 || dest == cc0_rtx
2460 #endif
2463 compare_mode = GET_MODE (XEXP (src, 0));
2464 if (compare_mode == VOIDmode)
2465 compare_mode = GET_MODE (XEXP (src, 1));
2469 subst_constants (&SET_SRC (x), insn, map, memonly);
2470 src = SET_SRC (x);
2472 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2473 || GET_CODE (*dest_loc) == SUBREG
2474 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2476 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2478 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2479 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2481 dest_loc = &XEXP (*dest_loc, 0);
2484 /* Do substitute in the address of a destination in memory. */
2485 if (GET_CODE (*dest_loc) == MEM)
2486 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2488 /* Check for the case where DEST is a SUBREG, both it and the underlying
2489 register are no larger than one word, and the SUBREG has the wider mode.
2490 In that case, we are really setting the underlying register to the
2491 source converted to the mode of DEST. So indicate that. */
2492 if (GET_CODE (dest) == SUBREG
2493 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2494 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2495 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2496 <= GET_MODE_SIZE (GET_MODE (dest)))
2497 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2498 src)))
2499 src = tem, dest = SUBREG_REG (dest);
2501 /* If storing a recognizable value, save it for later recording. */
2502 if ((map->num_sets < MAX_RECOG_OPERANDS)
2503 && (CONSTANT_P (src)
2504 || (GET_CODE (src) == REG
2505 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2506 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2507 || (GET_CODE (src) == PLUS
2508 && GET_CODE (XEXP (src, 0)) == REG
2509 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2510 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2511 && CONSTANT_P (XEXP (src, 1)))
2512 || GET_CODE (src) == COMPARE
2513 #ifdef HAVE_cc0
2514 || dest == cc0_rtx
2515 #endif
2516 || (dest == pc_rtx
2517 && (src == pc_rtx || GET_CODE (src) == RETURN
2518 || GET_CODE (src) == LABEL_REF))))
2520 /* Normally, this copy won't do anything. But if SRC is a COMPARE,
2521 it will cause us to save the COMPARE with any constants
2522 substituted, which is what we want for later. */
2523 rtx src_copy = copy_rtx (src);
2524 map->equiv_sets[map->num_sets].equiv = src_copy;
2525 map->equiv_sets[map->num_sets++].dest = dest;
2526 if (compare_mode != VOIDmode
2527 && GET_CODE (src) == COMPARE
2528 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2529 #ifdef HAVE_cc0
2530 || dest == cc0_rtx
2531 #endif
2533 && GET_MODE (XEXP (src, 0)) == VOIDmode
2534 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2536 map->compare_src = src_copy;
2537 map->compare_mode = compare_mode;
2541 return;
2543 default:
2544 break;
2547 format_ptr = GET_RTX_FORMAT (code);
2549 /* If the first operand is an expression, save its mode for later. */
2550 if (*format_ptr == 'e')
2551 op0_mode = GET_MODE (XEXP (x, 0));
2553 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2555 switch (*format_ptr++)
2557 case '0':
2558 break;
2560 case 'e':
2561 if (XEXP (x, i))
2562 subst_constants (&XEXP (x, i), insn, map, memonly);
2563 break;
2565 case 'u':
2566 case 'i':
2567 case 's':
2568 case 'w':
2569 case 'n':
2570 case 't':
2571 break;
2573 case 'E':
2574 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2575 for (j = 0; j < XVECLEN (x, i); j++)
2576 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2578 break;
2580 default:
2581 abort ();
2585 /* If this is a commutative operation, move a constant to the second
2586 operand unless the second operand is already a CONST_INT. */
2587 if (! memonly
2588 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2589 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2591 rtx tem = XEXP (x, 0);
2592 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2593 validate_change (insn, &XEXP (x, 1), tem, 1);
2596 /* Simplify the expression in case we put in some constants. */
2597 if (! memonly)
2598 switch (GET_RTX_CLASS (code))
2600 case '1':
2601 if (op0_mode == MAX_MACHINE_MODE)
2602 abort ();
2603 new = simplify_unary_operation (code, GET_MODE (x),
2604 XEXP (x, 0), op0_mode);
2605 break;
2607 case '<':
2609 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2611 if (op_mode == VOIDmode)
2612 op_mode = GET_MODE (XEXP (x, 1));
2613 new = simplify_relational_operation (code, op_mode,
2614 XEXP (x, 0), XEXP (x, 1));
2615 #ifdef FLOAT_STORE_FLAG_VALUE
2616 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2618 enum machine_mode mode = GET_MODE (x);
2619 if (new == const0_rtx)
2620 new = CONST0_RTX (mode);
2621 else
2623 REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
2624 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2627 #endif
2628 break;
2631 case '2':
2632 case 'c':
2633 new = simplify_binary_operation (code, GET_MODE (x),
2634 XEXP (x, 0), XEXP (x, 1));
2635 break;
2637 case 'b':
2638 case '3':
2639 if (op0_mode == MAX_MACHINE_MODE)
2640 abort ();
2642 if (code == IF_THEN_ELSE)
2644 rtx op0 = XEXP (x, 0);
2646 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2647 && GET_MODE (op0) == VOIDmode
2648 && ! side_effects_p (op0)
2649 && XEXP (op0, 0) == map->compare_src
2650 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2652 /* We have a compare of two VOIDmode constants for which
2653 we recorded the comparison mode. */
2654 rtx temp =
2655 simplify_relational_operation (GET_CODE (op0),
2656 map->compare_mode,
2657 XEXP (op0, 0),
2658 XEXP (op0, 1));
2660 if (temp == const0_rtx)
2661 new = XEXP (x, 2);
2662 else if (temp == const1_rtx)
2663 new = XEXP (x, 1);
2666 if (!new)
2667 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2668 XEXP (x, 0), XEXP (x, 1),
2669 XEXP (x, 2));
2670 break;
2673 if (new)
2674 validate_change (insn, loc, new, 1);
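/* Illustrative sketch (not GCC code): the REG case above only trusts
   an equivalence whose recorded age reaches map->const_age, so bumping
   const_age invalidates every younger entry at once without clearing
   the table.  The age-stamped table below, with hypothetical types and
   a fixed size, shows that scheme.  */

struct equiv { int value; int valid; int age; };

static struct equiv equiv_table[128];
static int cur_const_age;

/* Record that register REGNO currently equals VALUE.  */
static void
record_equiv (int regno, int value)
{
  equiv_table[regno].value = value;
  equiv_table[regno].valid = 1;
  equiv_table[regno].age = cur_const_age;
}

/* Return nonzero and set *VALUE if REGNO has a usable equivalence:
   one recorded no earlier than the current age threshold.  */
static int
lookup_equiv (int regno, int *value)
{
  if (equiv_table[regno].valid && equiv_table[regno].age >= cur_const_age)
    {
      *value = equiv_table[regno].value;
      return 1;
    }
  return 0;
}

/* Invalidate all younger equivalences in one step, as the const_age++
   in copy_insn_notes does.  */
static void
age_equivs (void)
{
  cur_const_age++;
}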
2677 /* Show that registers modified no longer contain known constants. We are
2678 called from note_stores with parts of the new insn. */
2680 static void
2681 mark_stores (dest, x, data)
2682 rtx dest;
2683 rtx x ATTRIBUTE_UNUSED;
2684 void *data ATTRIBUTE_UNUSED;
2686 int regno = -1;
2687 enum machine_mode mode = VOIDmode;
2689 /* DEST is always the innermost thing set, except in the case of
2690 SUBREGs of hard registers. */
2692 if (GET_CODE (dest) == REG)
2693 regno = REGNO (dest), mode = GET_MODE (dest);
2694 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2696 regno = REGNO (SUBREG_REG (dest));
2697 if (regno < FIRST_PSEUDO_REGISTER)
2698 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2699 GET_MODE (SUBREG_REG (dest)),
2700 SUBREG_BYTE (dest),
2701 GET_MODE (dest));
2702 mode = GET_MODE (SUBREG_REG (dest));
2705 if (regno >= 0)
2707 unsigned int uregno = regno;
2708 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2709 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2710 unsigned int i;
2712 /* Ignore virtual stack var or virtual arg register since those
2713 are handled separately. */
2714 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2715 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2716 for (i = uregno; i <= last_reg; i++)
2717 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2718 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
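/* Illustrative sketch (not GCC code): mark_stores widens a store to a
   multi-word hard register to every register the value occupies before
   wiping equivalences.  In the standalone version below,
   `regs_per_value' is a hypothetical stand-in for HARD_REGNO_NREGS,
   and the register counts are made up.  */

#define FIRST_PSEUDO 64		/* Hypothetical first pseudo number.  */

static int known_const[256];	/* Nonzero if regno has a known value.  */

/* Number of consecutive hard registers a MODE_SIZE-byte value
   occupies, standing in for HARD_REGNO_NREGS.  */
static int
regs_per_value (int mode_size, int word_size)
{
  return (mode_size + word_size - 1) / word_size;
}

/* Forget constants for every register the store to REGNO clobbers:
   one entry for a pseudo, a whole range for a multi-word hard reg.  */
static void
invalidate_store (int regno, int mode_size, int word_size)
{
  int last = (regno >= FIRST_PSEUDO
	      ? regno
	      : regno + regs_per_value (mode_size, word_size) - 1);
  int i;

  for (i = regno; i <= last; i++)
    known_const[i] = 0;
}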
2722 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2723 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2724 that it points to the node itself, thus indicating that the node is its
2725 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2726 the given node is NULL, recursively descend the decl/block tree of which
2727 it is the root, and for each other ..._DECL or BLOCK node contained
2728 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2729 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2730 values to point to themselves. */
2732 static void
2733 set_block_origin_self (stmt)
2734 register tree stmt;
2736 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2738 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2741 register tree local_decl;
2743 for (local_decl = BLOCK_VARS (stmt);
2744 local_decl != NULL_TREE;
2745 local_decl = TREE_CHAIN (local_decl))
2746 set_decl_origin_self (local_decl); /* Potential recursion. */
2750 register tree subblock;
2752 for (subblock = BLOCK_SUBBLOCKS (stmt);
2753 subblock != NULL_TREE;
2754 subblock = BLOCK_CHAIN (subblock))
2755 set_block_origin_self (subblock); /* Recurse. */
2760 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2761 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2762 node so that it points to the node itself, thus indicating that the
2763 node represents its own (abstract) origin. Additionally, if the
2764 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2765 the decl/block tree of which the given node is the root, and for
2766 each other ..._DECL or BLOCK node contained therein whose
2767 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2768 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2769 point to themselves. */
2771 void
2772 set_decl_origin_self (decl)
2773 register tree decl;
2775 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2777 DECL_ABSTRACT_ORIGIN (decl) = decl;
2778 if (TREE_CODE (decl) == FUNCTION_DECL)
2780 register tree arg;
2782 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2783 DECL_ABSTRACT_ORIGIN (arg) = arg;
2784 if (DECL_INITIAL (decl) != NULL_TREE
2785 && DECL_INITIAL (decl) != error_mark_node)
2786 set_block_origin_self (DECL_INITIAL (decl));
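/* Illustrative sketch (not GCC code): in the two functions above the
   NULL test on the origin doubles as a visited check -- once a node
   points to itself, its subtree is never descended again.  The toy
   recursion below collapses the block/decl mutual recursion into a
   single hypothetical node type.  */

struct origin_node
{
  struct origin_node *origin;	  /* Null until marked.  */
  struct origin_node *children;	  /* First child.  */
  struct origin_node *sibling;	  /* Next sibling.  */
};

/* Mark NODE as its own origin and descend, but only the first time:
   a non-null origin means the subtree was already processed.  */
static void
set_origin_self (struct origin_node *node)
{
  struct origin_node *child;

  if (node->origin)
    return;
  node->origin = node;
  for (child = node->children; child; child = child->sibling)
    set_origin_self (child);
}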
2791 /* Given a pointer to some BLOCK node, and a boolean value to set the
2792 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2793 the given block, and for all local decls and all local sub-blocks
2794 (recursively) which are contained therein. */
2796 static void
2797 set_block_abstract_flags (stmt, setting)
2798 register tree stmt;
2799 register int setting;
2801 register tree local_decl;
2802 register tree subblock;
2804 BLOCK_ABSTRACT (stmt) = setting;
2806 for (local_decl = BLOCK_VARS (stmt);
2807 local_decl != NULL_TREE;
2808 local_decl = TREE_CHAIN (local_decl))
2809 set_decl_abstract_flags (local_decl, setting);
2811 for (subblock = BLOCK_SUBBLOCKS (stmt);
2812 subblock != NULL_TREE;
2813 subblock = BLOCK_CHAIN (subblock))
2814 set_block_abstract_flags (subblock, setting);
2817 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2818 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2819 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2820 set the abstract flags for all of the parameters, local vars, local
2821 blocks and sub-blocks (recursively) to the same setting. */
2823 void
2824 set_decl_abstract_flags (decl, setting)
2825 register tree decl;
2826 register int setting;
2828 DECL_ABSTRACT (decl) = setting;
2829 if (TREE_CODE (decl) == FUNCTION_DECL)
2831 register tree arg;
2833 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2834 DECL_ABSTRACT (arg) = setting;
2835 if (DECL_INITIAL (decl) != NULL_TREE
2836 && DECL_INITIAL (decl) != error_mark_node)
2837 set_block_abstract_flags (DECL_INITIAL (decl), setting);
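/* Illustrative sketch (not GCC code): a typical caller flips the
   abstract flags on for a whole tree, emits the abstract instance,
   then flips them back off.  The toy below shows that bracket around
   the recursive setter; the node type and `op' callback are
   hypothetical.  */

struct flag_node
{
  int abstract;
  struct flag_node *children;
  struct flag_node *sibling;
};

/* Recursively set the abstract flag for NODE and everything below it,
   as set_block_abstract_flags and set_decl_abstract_flags do.  */
static void
set_abstract_flags (struct flag_node *node, int setting)
{
  struct flag_node *child;

  node->abstract = setting;
  for (child = node->children; child; child = child->sibling)
    set_abstract_flags (child, setting);
}

/* Mark the tree abstract around some operation, then restore.  */
static void
with_abstract_flags (struct flag_node *root, void (*op) (struct flag_node *))
{
  set_abstract_flags (root, 1);
  op (root);
  set_abstract_flags (root, 0);
}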
2841 /* Output the assembly language code for the function FNDECL
2842 from its DECL_SAVED_INSNS. Used for inline functions that are output
2843 at end of compilation instead of where they came in the source. */
2845 void
2846 output_inline_function (fndecl)
2847 tree fndecl;
2849 struct function *old_cfun = cfun;
2850 enum debug_info_type old_write_symbols = write_symbols;
2851 struct function *f = DECL_SAVED_INSNS (fndecl);
2853 cfun = f;
2854 current_function_decl = fndecl;
2855 clear_emit_caches ();
2857 set_new_last_label_num (f->inl_max_label_num);
2859 /* We're not deferring this any longer. */
2860 DECL_DEFER_OUTPUT (fndecl) = 0;
2862 /* If requested, suppress debugging information. */
2863 if (f->no_debugging_symbols)
2864 write_symbols = NO_DEBUG;
2866 /* Do any preparation, such as emitting abstract debug info for the
2867 inline function before it gets mangled by optimization. */
2868 note_outlining_of_inline_function (fndecl);
2870 /* Compile this function all the way down to assembly code. */
2871 rest_of_compilation (fndecl);
2873 /* We can't inline this anymore. */
2874 f->inlinable = 0;
2875 DECL_INLINE (fndecl) = 0;
2877 cfun = old_cfun;
2878 current_function_decl = old_cfun ? old_cfun->decl : 0;
2879 write_symbols = old_write_symbols;
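/* Illustrative sketch (not GCC code): output_inline_function works by
   saving the global context (cfun, current_function_decl, the debug
   level), installing the deferred function, compiling, and restoring
   whatever was current before.  The standalone routine below shows
   that save-swap-restore shape with hypothetical globals.  */

struct fn_context { const char *name; };

static struct fn_context *current_context;
static int debug_level;

/* Run COMPILE with CTX installed as the global context, optionally
   suppressing debug output, then restore the caller's context,
   mirroring the prologue and epilogue of output_inline_function.  */
static void
compile_in_context (struct fn_context *ctx, int suppress_debug,
		    void (*compile) (void))
{
  struct fn_context *old_context = current_context;
  int old_debug_level = debug_level;

  current_context = ctx;
  if (suppress_debug)
    debug_level = 0;

  compile ();

  current_context = old_context;
  debug_level = old_debug_level;
}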