gcc/integrate.c
/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"
#include "params.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
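/* For illustration: CEIL_ROUND (13, 8) is 16, and CEIL_ROUND (16, 8)
   stays 16.  The add-and-mask trick assumes ALIGN is a power of two,
   which alignments always are here.  */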
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
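/* Worked example: a two-argument function gets a budget of
   1 + (3 * 2) / 2 = 4 insns under -Os, versus 8 * (8 + 2) = 80 insns
   when optimizing for speed.  */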
/* Decide whether a function with a target specific attribute
   attached can be inlined.  By default we disallow this.  */
#ifndef FUNCTION_ATTRIBUTE_INLINABLE_P
#define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) 0
#endif
static rtvec initialize_for_inline	PARAMS ((tree));
static void note_modified_parmregs	PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls	PARAMS ((tree, struct inline_remap *,
						 rtvec));
static tree integrate_decl_tree		PARAMS ((tree,
						 struct inline_remap *));
static void subst_constants		PARAMS ((rtx *, rtx,
						 struct inline_remap *, int));
static void set_block_origin_self	PARAMS ((tree));
static void set_block_abstract_flags	PARAMS ((tree, int));
static void process_reg_param		PARAMS ((struct inline_remap *, rtx,
						 rtx));
void set_decl_abstract_flags		PARAMS ((tree, int));
static void mark_stores			PARAMS ((rtx, rtx, void *));
static void save_parm_insns		PARAMS ((rtx, rtx));
static void copy_insn_list		PARAMS ((rtx, struct inline_remap *,
						 rtx));
static void copy_insn_notes		PARAMS ((rtx, struct inline_remap *,
						 int));
static int compare_blocks		PARAMS ((const PTR, const PTR));
static int find_block			PARAMS ((const PTR, const PTR));
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the struct function for the inlined function.  */
static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
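/* Note that callers are expected to zero the relevant range of label_map
   up front (expand_inline_function does this with memset below), so an
   entry reads as NULL_RTX until the first request creates its label.  */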
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     MAX_INLINE_INSNS (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (MAX_INLINE_INSNS
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;
  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif
  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
	       && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }
  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }
  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
	return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (DECL_MACHINE_ATTRIBUTES (fndecl)
      && ! FUNCTION_ATTRIBUTE_INLINABLE_P (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      if (DECL_LANG_SPECIFIC (copy))
	copy_lang_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;
  save_parm_insns (insn, first_nonparm_insn);

  cfun->inl_max_label_num = max_label_num ();
  cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
  cfun->original_arg_vector = argvec;
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  free (parmdecl_map);
}
/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (INSN_P (insn))
	{
	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
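/* For example, FIXED_BASE_PLUS_P matches an address such as
   (plus (reg virtual-stack-vars) (const_int 8)): a constant offset from
   one of the virtual base registers, which stays fixed for the lifetime
   of the inlined frame.  */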
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */

static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);

  return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
	  - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);

  return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
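/* Both comparisons order blocks by the raw address of their
   BLOCK_ABSTRACT_ORIGIN.  The order is arbitrary but consistent, which is
   all that is needed: qsort arranges map->block_map so that the bsearch
   in copy_insn_list's NOTE handling can find the remapped block for an
   original one.  */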
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

	  store_expr (arg, stack_slot, 0);
	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    {
	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
	      enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

	      pmode = promote_mode (TREE_TYPE (formal), pmode,
				    &unsignedp, 0);

	      if (GET_MODE (loc) != pmode)
		abort ();

	      /* The mode of LOC and ARG can differ if LOC was a variable
		 that had its mode promoted via PROMOTED_MODE.  */
	      arg_vals[i] = convert_modes (pmode,
					   TYPE_MODE (TREE_TYPE (arg)),
					   expand_expr (arg, NULL_RTX, mode,
							EXPAND_SUM),
					   unsignedp);
	    }
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }
  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));

	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
	 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      /* A virtual register might be invalid in an insn, because
		 it can cause trouble in reload.  Since we don't have access
		 to the expanders at map translation time, make sure we have
		 a proper register now.
		 If a virtual register is actually valid, cse or combine
		 can put it into the mapped insns.  */
	      if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
		  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
		temp = copy_to_mode_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else
    abort ();
  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
	  (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode because
       the superblocks have not been created yet.  */
    insert_block (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  VARRAY_FREE (map->block_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}
/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is non-zero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  register int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* Similarly if an ignored return value is clobbered.  */
	  else if (map->inline_target == 0
		   && GET_CODE (pattern) == CLOBBER
		   && GET_CODE (XEXP (pattern, 0)) == REG
		   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    break;

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;
	case JUMP_INSN:
	  if (map->integrating && returnjump_p (insn))
	    {
	      if (map->local_return_label == 0)
		map->local_return_label = gen_label_rtx ();
	      pattern = gen_jump (map->local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn whose branch
	     direction is now known, we must do something special.  */
	  if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;
	case CALL_INSN:
	  /* If this is a CALL_PLACEHOLDER insn then we need to copy the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      rtx sequence[3];
	      rtx tail_label;

	      for (i = 0; i < 3; i++)
		{
		  rtx seq;

		  sequence[i] = NULL_RTX;
		  seq = XEXP (PATTERN (insn), i);
		  if (seq)
		    {
		      start_sequence ();
		      copy_insn_list (seq, map, static_chain_value);
		      sequence[i] = get_insns ();
		      end_sequence ();
		    }
		}

	      /* Find the new tail recursion label.
		 It will already be substituted into sequence[2].  */
	      tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
						    map, 0);

	      copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
							       sequence[0],
							       sequence[1],
							       sequence[2],
							       tail_label));
	      break;
	    }

	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
	  CONST_CALL_P (copy) = CONST_CALL_P (insn);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */

	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;
	case NOTE:
	  /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
	     discarded because it is important to have only one of
	     each in the current function.

	     NOTE_INSN_DELETED notes aren't useful.

	     NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
	     pointer (which will soon be dangling) confuses flow's
	     attempts to preserve bb structures during the compilation
	     of a function.  */

	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
		  && NOTE_BLOCK (insn))
		{
		  tree *mapped_block_p;

		  mapped_block_p
		    = (tree *) bsearch (NOTE_BLOCK (insn),
					&VARRAY_TREE (map->block_map, 0),
					map->block_map->elements_used,
					sizeof (tree),
					find_block);

		  if (!mapped_block_p)
		    abort ();
		  else
		    NOTE_BLOCK (copy) = *mapped_block_p;
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
}
/* Copy the REG_NOTES.  Increment const_age, so that only constants
   from parameters can be substituted in.  These are the only ones
   that are valid across the entire function.  */

static void
copy_insn_notes (insns, map, eh_region_offset)
     rtx insns;
     struct inline_remap *map;
     int eh_region_offset;
{
  rtx insn, new_insn;

  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      new_insn = map->insn_map[INSN_UID (insn)];
      if (! new_insn)
	continue;

      if (REG_NOTES (insn))
	{
	  rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

	  /* We must also do subst_constants, in case one of our parameters
	     has const type and constant value.  */
	  subst_constants (&note, NULL_RTX, map, 0);
	  apply_change_group ();
	  REG_NOTES (new_insn) = note;

	  /* Delete any REG_LABEL notes from the chain.  Remap any
	     REG_EH_REGION notes.  */
	  for (; note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL)
		remove_note (new_insn, note);
	      else if (REG_NOTE_KIND (note) == REG_EH_REGION)
		XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
					  + eh_region_offset);
	    }
	}

      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  int i;
	  for (i = 0; i < 3; i++)
	    copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
	}

      if (GET_CODE (insn) == JUMP_INSN
	  && GET_CODE (PATTERN (insn)) == RESX)
	XINT (PATTERN (new_insn), 0) += eh_region_offset;
    }
}
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree decl = copy_decl_for_inlining (tail, map->fndecl,
					  current_function_decl);
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      SET_DECL_RTL (decl, new_decl_rtl);
    }
}
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static tree
integrate_decl_tree (let, map)
     tree let;
     struct inline_remap *map;
{
  tree t;
  tree new_block;
  tree *next;

  new_block = make_node (BLOCK);
  VARRAY_PUSH_TREE (map->block_map, new_block);
  next = &BLOCK_VARS (new_block);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);

      if (DECL_RTL_SET_P (t))
	{
	  rtx r;

	  SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));

	  /* Fully instantiate the address with the equivalent form so that
	     the debugging information contains the actual register, instead
	     of the virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  r = DECL_RTL (d);
	  subst_constants (&r, NULL_RTX, map, 1);
	  SET_DECL_RTL (d, r);
	  apply_change_group ();
	}

      /* Add this declaration to the list of variables in the new
	 block.  */
      *next = d;
      next = &TREE_CHAIN (d);
    }

  next = &BLOCK_SUBBLOCKS (new_block);
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    {
      *next = integrate_decl_tree (t, map);
      BLOCK_SUPERCONTEXT (*next) = new_block;
      next = &BLOCK_CHAIN (*next);
    }

  TREE_USED (new_block) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (new_block) = let;

  return new_block;
}
/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */
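
/* For example (an illustrative sketch): copying the callee pattern
   (set (reg 60) (plus (reg 60) (const_int 4))) when map->reg_map[60]
   is (reg 105) produces (set (reg 105) (plus (reg 105) (const_int 4)));
   sharable codes such as CONST_INT come back unchanged.  */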
rtx
copy_rtx_and_substitute (orig, map, for_lhs)
     register rtx orig;
     struct inline_remap *map;
     int for_lhs;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER
	  || (map->integrating
	      && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0
	      /* We shouldn't usually have reg_map set for the return
		 register, but it may happen if we have leaf-register
		 remapping and the return register is used in one of
		 the calling sequences of a call_placeholder.  In this
		 case, we'll end up with a reg_map set for this
		 register, but we don't want to use it for registers
		 marked as return values.  */
	      && ! REG_FUNCTION_VALUE_P (orig))
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in the current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
		   || (map->integrating
		       && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
			   == orig)))
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_SAVED_INSNS (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      /* This is a reference to the function return value.  If
		 the function doesn't have a return value, error.  If the
		 mode doesn't agree, and it ain't BLKmode, make a SUBREG.  */
	      if (map->inline_target == 0)
		{
		  if (rtx_equal_function_value_matters)
		    /* This is an ignored return value.  We must not
		       leave it in with REG_FUNCTION_VALUE_P set, since
		       that would confuse subsequent inlining of the
		       current function into a later function.  */
		    return gen_rtx_REG (GET_MODE (orig), regno);
		  else
		    /* Must be unrolling loops or replicating code if we
		       reach here, so return the register unchanged.  */
		    return orig;
		}
	      else if (GET_MODE (map->inline_target) != BLKmode
		       && mode != GET_MODE (map->inline_target))
		return gen_lowpart (mode, map->inline_target);
	      else
		return map->inline_target;
	    }
#if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
	  /* If leaf_renumber_regs_insn() might remap this register to
	     some other number, make sure we don't share it with the
	     inlined function, otherwise delayed optimization of the
	     inlined function may change it in place, breaking our
	     reference to it.  We may still share it within the
	     function, so create an entry for this register in the
	     reg_map.  */
	  if (map->integrating && regno < FIRST_PSEUDO_REGISTER
	      && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
	    {
	      temp = gen_rtx_REG (mode, regno);
	      map->reg_map[regno] = temp;
	      return temp;
	    }
#endif
	  else
	    return orig;

	  abort ();
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];
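
      /* Illustrative numbers for the virtual-register mapping above: for
	 a callee with a 24-byte frame, VIRTUAL_STACK_VARS_REGNUM maps to
	 a pseudo holding the address of a fresh 24-byte stack temporary
	 in the caller (offset by the size when FRAME_GROWS_DOWNWARD), and
	 that address is recorded as a constant equivalence so that
	 subst_constants can later fold it directly into addresses.  */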
    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
	{
	  int final_offset = SUBREG_BYTE (orig) + SUBREG_BYTE (copy);

	  /* When working with SUBREGs the rule is that the byte
	     offset must be a multiple of the SUBREG's mode.  */
	  final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (orig)));
	  final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (orig)));
	  return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
				 final_offset);
	}
      else if (GET_CODE (copy) == CONCAT)
	{
	  rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
	  int final_offset;

	  if (GET_MODE (retval) == GET_MODE (orig))
	    return retval;

	  final_offset = SUBREG_BYTE (orig) %
			 GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)));
	  final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (orig)));
	  final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (orig)));
	  return gen_rtx_SUBREG (GET_MODE (orig), retval, final_offset);
	}
      else
	return gen_rtx_SUBREG (GET_MODE (orig), copy,
			       SUBREG_BYTE (orig));

    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
				copy_rtx_and_substitute (XEXP (orig, 0),
							 map, for_lhs),
				0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
	regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
	{
	  temp = XEXP (orig, 0);
	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	  regno = REGNO (map->reg_map[regno]);
	}
      ADDRESSOF_REGNO (copy) = regno;
      return copy;

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
	return map->insn_map[INSN_UID (orig)];
      break;

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
	 inlined call will be referring to our nonlocal goto handler.
	 So make sure we create one for this block; we normally would
	 not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
      return orig;
    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = inlining ? inlining : cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
	  if (inlining)
	    {
	      rtx temp = force_const_mem (const_mode,
					  copy_rtx_and_substitute (constant,
								   map, 0));

#if 0
	      /* Legitimizing the address here is incorrect.

		 Since we had a SYMBOL_REF before, we can assume it is valid
		 to have one in this position in the insn.

		 Also, change_address may create new registers.  These
		 registers will not have valid reg_map entries.  This can
		 cause try_constants() to fail because it assumes that all
		 registers in the rtx have valid reg_map entries, and it may
		 end up replacing one of these new registers with junk.  */

	      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
		temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

	      temp = XEXP (temp, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) != GET_MODE (orig))
		temp = convert_memory_address (GET_MODE (orig), temp);
#endif
	      return temp;
	    }
	  else if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}

      return orig;
    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      /* Make a new constant pool entry for a constant
	 that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
	abort ();
      break;
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  copy->volatil = orig->volatil;
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	return
	  gen_rtx_CALL
	    (GET_MODE (orig),
	     gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			  copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						   map, 0)),
	     copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif
    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;
    case MEM:
      if (inlining
	  && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
	{
	  enum machine_mode const_mode
	    = get_pool_mode_for_function (inlining, XEXP (orig, 0));
	  rtx constant
	    = get_pool_constant_for_function (inlining, XEXP (orig, 0));

	  constant = copy_rtx_and_substitute (constant, map, 0);

	  /* If this was an address of a constant pool entry that itself
	     had to be placed in the constant pool, it might not be a
	     valid address.  So the recursive call might have turned it
	     into a register.  In that case, it isn't a constant any
	     more, so return it.  This has the potential of changing a
	     MEM into a REG, but we'll assume that it is safe.  */
	  if (! CONSTANT_P (constant))
	    return constant;

	  return validize_mem (force_const_mem (const_mode, constant));
	}

      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  /* Copy this through the wide int field; that's safest.  */
	  X0WINT (copy, i) = X0WINT (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
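
/* Note on the ASM_OPERANDS handling above (added commentary): an asm
   with several outputs is represented by several ASM_OPERANDS rtx's
   sharing one input vector and one constraint vector.  The first one
   copied records the original and copied vectors in MAP; each later
   sibling then reuses map->copy_asm_operands_vector and
   map->copy_asm_constraints_vector, so the sharing survives the copy.  */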
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* The following clause is a hack to make the case work where
		 GNU C++ reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
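
/* Illustrative example of the two passes above (not from the original
   source): given (set (mem (reg 103)) (reg 104)) where both pseudos
   have known constant equivalences, the first, MEM-only pass may
   rewrite just the address, which helps when the machine cannot accept
   a constant source; the second pass then tries the source as well,
   and each apply_change_group keeps only substitutions the insn still
   accepts.  */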
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (loc, insn, map, memonly)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
     int memonly;
{
  rtx x = *loc;
  register int i, j;
  register enum rtx_code code;
  register const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;

	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map, 0);

	  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	      && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
	    new = operand_subword (inner, SUBREG_BYTE (x) / UNITS_PER_WORD,
				   0, GET_MODE (SUBREG_REG (x)));

	  cancel_changes (num_changes);
	  if (new == 0 && subreg_lowpart_p (x))
	    new = gen_lowpart_common (GET_MODE (x), inner);

	  if (new)
	    validate_change (insn, loc, new, 1);

	  return;
	}
      break;
    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;
	enum machine_mode compare_mode = VOIDmode;

	/* If SET_SRC is a COMPARE which subst_constants would turn into
	   COMPARE of 2 VOIDmode constants, note the mode in which the
	   comparison is to be done.  */
	if (GET_CODE (SET_SRC (x)) == COMPARE)
	  {
	    src = SET_SRC (x);
	    if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		)
	      {
		compare_mode = GET_MODE (XEXP (src, 0));
		if (compare_mode == VOIDmode)
		  compare_mode = GET_MODE (XEXP (src, 1));
	      }
	  }

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider mode.
	   In that case, we are really setting the underlying register to the
	   source converted to the mode of DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value, save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (GET_CODE (src) == REG
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && GET_CODE (XEXP (src, 0)) == REG
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a
	       COMPARE it will cause us to save the COMPARE with any
	       constants substituted, which is what we want for later.  */
	    rtx src_copy = copy_rtx (src);
	    map->equiv_sets[map->num_sets].equiv = src_copy;
	    map->equiv_sets[map->num_sets++].dest = dest;
	    if (compare_mode != VOIDmode
		&& GET_CODE (src) == COMPARE
		&& (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
#ifdef HAVE_cc0
		    || dest == cc0_rtx
#endif
		    )
		&& GET_MODE (XEXP (src, 0)) == VOIDmode
		&& GET_MODE (XEXP (src, 1)) == VOIDmode)
	      {
		map->compare_src = src_copy;
		map->compare_mode = compare_mode;
	      }
	  }
      }
      return;
    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map, memonly);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 'n':
	case 't':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

	  break;

	default:
	  abort ();
	}
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case '1':
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case '<':
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));
	  new = simplify_relational_operation (code, op_mode,
					       XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
	  if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	    {
	      enum machine_mode mode = GET_MODE (x);
	      if (new == const0_rtx)
		new = CONST0_RTX (mode);
	      else
		{
		  REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
		  new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
		}
	    }
#endif
	  break;
	}

      case '2':
      case 'c':
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case 'b':
      case '3':
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	if (code == IF_THEN_ELSE)
	  {
	    rtx op0 = XEXP (x, 0);

	    if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
		&& GET_MODE (op0) == VOIDmode
		&& ! side_effects_p (op0)
		&& XEXP (op0, 0) == map->compare_src
		&& GET_MODE (XEXP (op0, 1)) == VOIDmode)
	      {
		/* We have a compare of two VOIDmode constants for which
		   we recorded the comparison mode.  */
		rtx temp =
		  simplify_relational_operation (GET_CODE (op0),
						 map->compare_mode,
						 XEXP (op0, 0),
						 XEXP (op0, 1));

		if (temp == const0_rtx)
		  new = XEXP (x, 2);
		else if (temp == const1_rtx)
		  new = XEXP (x, 1);
	      }
	  }

	if (!new)
	  new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					    XEXP (x, 0), XEXP (x, 1),
					    XEXP (x, 2));
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}
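
/* Example of the canonicalization and folding above (an illustrative
   sketch): if the first operand of (plus (reg 103) (reg 104)) is
   replaced by its equivalence (const_int 8), the commutative swap turns
   it into (plus (reg 104) (const_int 8)); and if both operands become
   constants, simplify_binary_operation folds the whole expression.  */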
/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */

static void
mark_stores (dest, x, data)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				      GET_MODE (SUBREG_REG (dest)),
				      SUBREG_BYTE (dest),
				      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
			       : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && uregno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = uregno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
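
/* For instance (illustrative): a store to a two-word hard register
   starting at regno 4 wipes the recorded equivalences of regnos 4 and 5,
   since HARD_REGNO_NREGS says how many hard registers the mode spans;
   a store to a pseudo invalidates just that one entry.  */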
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
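
/* Typical use of the abstract-flag routines (a sketch based on how the
   debug back ends call them, not code from this file): output of an
   abstract inline instance is bracketed as

	set_decl_abstract_flags (decl, 1);
	... emit the abstract instance ...
	set_decl_abstract_flags (decl, 0);

   so DECL_ABSTRACT and BLOCK_ABSTRACT are only transiently set.  */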
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  struct function *old_cfun = cfun;
  enum debug_info_type old_write_symbols = write_symbols;
  struct function *f = DECL_SAVED_INSNS (fndecl);

  cfun = f;
  current_function_decl = fndecl;
  clear_emit_caches ();

  set_new_last_label_num (f->inl_max_label_num);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* If requested, suppress debugging information.  */
  if (f->no_debugging_symbols)
    write_symbols = NO_DEBUG;

  /* Do any preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  note_outlining_of_inline_function (fndecl);

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  /* We can't inline this anymore.  */
  f->inlinable = 0;
  DECL_INLINE (fndecl) = 0;

  cfun = old_cfun;
  current_function_decl = old_cfun ? old_cfun->decl : 0;
  write_symbols = old_write_symbols;
}