/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"
#include "params.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
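
/* For example, CEIL_ROUND (37, 8) is (37 + 7) & ~7 == 40.  ALIGN is
   assumed to be a power of two; the mask arithmetic is wrong otherwise.  */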

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
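
/* Worked example: for a two-argument function the -Os budget is
   1 + (3 * 2) / 2 == 4 insns, while the default budget is
   8 * (8 + 2) == 80 insns.  */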

/* Decide whether a function with a target specific attribute
   attached can be inlined.  By default we disallow this.  */
#ifndef FUNCTION_ATTRIBUTE_INLINABLE_P
#define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) 0
#endif

static rtvec initialize_for_inline PARAMS ((tree));
static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
					  rtvec));
static tree integrate_decl_tree PARAMS ((tree,
					 struct inline_remap *));
static void subst_constants PARAMS ((rtx *, rtx,
				     struct inline_remap *, int));
static void set_block_origin_self PARAMS ((tree));
static void set_block_abstract_flags PARAMS ((tree, int));
static void process_reg_param PARAMS ((struct inline_remap *, rtx,
				       rtx));
void set_decl_abstract_flags PARAMS ((tree, int));
static rtx expand_inline_function_eh_labelmap PARAMS ((rtx));
static void mark_stores PARAMS ((rtx, rtx, void *));
static void save_parm_insns PARAMS ((rtx, rtx));
static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
				    rtx));
static void copy_insn_notes PARAMS ((rtx, struct inline_remap *));
static int compare_blocks PARAMS ((const PTR, const PTR));
static int find_block PARAMS ((const PTR, const PTR));

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the struct function for the function being
   inlined.  */
static struct function *inlining = 0;

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
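
/* Callers index the map by CODE_LABEL_NUMBER;
   expand_inline_function_eh_labelmap below, for instance, funnels
   exception-handler labels through the currently active map this way.  */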

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     MAX_INLINE_INSNS (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (MAX_INLINE_INSNS
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
	       && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	return N_("function with complex parameters cannot be inline");
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
	return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (DECL_MACHINE_ATTRIBUTES (fndecl)
      && ! FUNCTION_ATTRIBUTE_INLINABLE_P (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      if (DECL_LANG_SPECIFIC (copy))
	copy_lang_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
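
/* For instance, a PARM_DECL for `int x' in FROM_FN comes back as a fresh
   VAR_DECL named `x' in TO_FN, with DECL_ABSTRACT_ORIGIN pointing at the
   original parameter so the debugging routines can connect the two.  */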

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;
  save_parm_insns (insn, first_nonparm_insn);

  cfun->inl_max_label_num = max_label_num ();
  cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
  cfun->original_arg_vector = argvec;
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  free (parmdecl_map);
}

/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (INSN_P (insn))
	{
	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
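
/* For instance, FIXED_BASE_PLUS_P accepts (plus (reg) (const_int 8)) when
   the register is one of the virtual registers (e.g. virtual-stack-vars);
   such addresses are fixed for the life of the function, so they are safe
   to record as constant equivalences in process_reg_param below.  */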

/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}

/* Used by duplicate_eh_handlers to map labels for the exception table.  */
static struct inline_remap *eif_eh_map;

static rtx
expand_inline_function_eh_labelmap (label)
     rtx label;
{
  int index = CODE_LABEL_NUMBER (label);
  return get_label_from_map (eif_eh_map, index);
}

/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);

  return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
	  - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);

  return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
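
/* The two comparators above work as a pair: expand_inline_function qsorts
   map->block_map with compare_blocks, ordering remapped blocks by the
   address of their abstract origin, and the NOTE case in copy_insn_list
   then bsearches that array with find_block, keyed on an original block,
   to recover its remapped equivalent.  */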

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
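
/* A caller is therefore expected to compare the result against
   (rtx) (HOST_WIDE_INT) -1 and fall back to emitting an ordinary call when
   substitution fails; the call expander in calls.c does roughly this.  */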

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

	  store_expr (arg, stack_slot, 0);
	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_FRAME_SIZE (fndecl) != 0)
	    copy_rtx_and_substitute (virtual_stack_vars_rtx, map, 0);

	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
	 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      /* A virtual register might be invalid in an insn, because
		 it can cause trouble in reload.  Since we don't have access
		 to the expanders at map translation time, make sure we have
		 a proper register now.
		 If a virtual register is actually valid, cse or combine
		 can put it into the mapped insns.  */
	      if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
		  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
		temp = copy_to_mode_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else
    abort ();

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
	  (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode since
       the superblocks have not been created yet.  */
    insert_block (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  VARRAY_FREE (map->block_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is non-zero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  register int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* If the inline fn needs eh context, make sure that
	     the current fn has one.  */
	  if (GET_CODE (pattern) == USE
	      && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
	    get_eh_context ();

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* Similarly if an ignored return value is clobbered.  */
	  else if (map->inline_target == 0
		   && GET_CODE (pattern) == CLOBBER
		   && GET_CODE (XEXP (pattern, 0)) == REG
		   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    break;

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (map->local_return_label == 0)
		map->local_return_label = gen_label_rtx ();
	      pattern = gen_jump (map->local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn but whose branch
	     direction is now known, we must do something special.  */
	  if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put
		   a BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  /* If this is a CALL_PLACEHOLDER insn then we need to copy the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      rtx sequence[3];
	      rtx tail_label;

	      for (i = 0; i < 3; i++)
		{
		  rtx seq;

		  sequence[i] = NULL_RTX;
		  seq = XEXP (PATTERN (insn), i);
		  if (seq)
		    {
		      start_sequence ();
		      copy_insn_list (seq, map, static_chain_value);
		      sequence[i] = get_insns ();
		      end_sequence ();
		    }
		}

	      /* Find the new tail recursion label.
		 It will already be substituted into sequence[2].  */
	      tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
						    map, 0);

	      copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
							       sequence[0],
							       sequence[1],
							       sequence[2],
							       tail_label));
	      break;
	    }

	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
	  CONST_CALL_P (copy) = CONST_CALL_P (insn);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */

	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
	     discarded because it is important to have only one of
	     each in the current function.

	     NOTE_INSN_DELETED notes aren't useful.

	     NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
	     pointer (which will soon be dangling) confuses flow's
	     attempts to preserve bb structures during the compilation
	     of a function.  */

	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
		{
		  rtx label
		    = get_label_from_map (map, NOTE_EH_HANDLER (copy));

		  /* We have to duplicate the handlers for the original.  */
		  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		    {
		      /* We need to duplicate the handlers for the EH region
			 and we need to indicate where the label map is.  */
		      eif_eh_map = map;
		      duplicate_eh_handlers (NOTE_EH_HANDLER (copy),
					     CODE_LABEL_NUMBER (label),
					     expand_inline_function_eh_labelmap);
		    }

		  /* We have to forward these both to match the new exception
		     region.  */
		  NOTE_EH_HANDLER (copy) = CODE_LABEL_NUMBER (label);
		}
	      else if (copy
		       && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
			   || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
		       && NOTE_BLOCK (insn))
		{
		  tree *mapped_block_p;

		  mapped_block_p
		    = (tree *) bsearch (NOTE_BLOCK (insn),
					&VARRAY_TREE (map->block_map, 0),
					map->block_map->elements_used,
					sizeof (tree),
					find_block);

		  if (!mapped_block_p)
		    abort ();
		  else
		    NOTE_BLOCK (copy) = *mapped_block_p;
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
}

/* Copy the REG_NOTES.  Increment const_age, so that only constants
   from parameters can be substituted in.  These are the only ones
   that are valid across the entire function.  */

static void
copy_insn_notes (insns, map)
     rtx insns;
     struct inline_remap *map;
{
  rtx insn;

  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&note, NULL_RTX, map, 0);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = note;

	/* Finally, delete any REG_LABEL notes from the chain.  */
	for (; note; note = next)
	  {
	    next = XEXP (note, 1);
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      remove_note (map->insn_map[INSN_UID (insn)], note);
	  }
      }
}

/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree decl = copy_decl_for_inlining (tail, map->fndecl,
					  current_function_decl);
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      SET_DECL_RTL (decl, new_decl_rtl);
    }
}

/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static tree
integrate_decl_tree (let, map)
     tree let;
     struct inline_remap *map;
{
  tree t;
  tree new_block;
  tree *next;

  new_block = make_node (BLOCK);
  VARRAY_PUSH_TREE (map->block_map, new_block);
  next = &BLOCK_VARS (new_block);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);

      if (DECL_RTL_SET_P (t))
	{
	  rtx r;

	  SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));

	  /* Fully instantiate the address with the equivalent form so that the
	     debugging information contains the actual register, instead of the
	     virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  r = DECL_RTL (d);
	  subst_constants (&r, NULL_RTX, map, 1);
	  SET_DECL_RTL (d, r);
	  apply_change_group ();
	}

      /* Add this declaration to the list of variables in the new
	 block.  */
      *next = d;
      next = &TREE_CHAIN (d);
    }

  next = &BLOCK_SUBBLOCKS (new_block);
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    {
      *next = integrate_decl_tree (t, map);
      BLOCK_SUPERCONTEXT (*next) = new_block;
      next = &BLOCK_CHAIN (*next);
    }

  TREE_USED (new_block) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (new_block) = let;

  return new_block;
}
1719 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1720 except for those few rtx codes that are sharable.
1722 We always return an rtx that is similar to the incoming rtx, with the
1723 exception of possibly changing a REG to a SUBREG or vice versa. No
1724 rtl is ever emitted.
1726 If FOR_LHS is nonzero, it means we are processing something that will
1727 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even when
1728 inlining, since we need to be conservative in how it is set for
1729 such cases.
1731 Handle constants that need to be placed in the constant pool by
1732 calling `force_const_mem'. */
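/* A small example of the intended behavior (register numbers are
   hypothetical): copying

   (set (reg:SI 64) (plus:SI (reg:SI 65) (const_int 8)))

   produces a fresh SET whose pseudos are replaced through
   map->reg_map, new pseudos being created on first sight, while the
   sharable CONST_INT is returned unchanged.  */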
1734 rtx
1735 copy_rtx_and_substitute (orig, map, for_lhs)
1736 register rtx orig;
1737 struct inline_remap *map;
1738 int for_lhs;
1740 register rtx copy, temp;
1741 register int i, j;
1742 register RTX_CODE code;
1743 register enum machine_mode mode;
1744 register const char *format_ptr;
1745 int regno;
1747 if (orig == 0)
1748 return 0;
1750 code = GET_CODE (orig);
1751 mode = GET_MODE (orig);
1753 switch (code)
1755 case REG:
1756 /* If the stack pointer register shows up, it must be part of
1757 stack-adjustments (*not* because we eliminated the frame pointer!).
1758 Small hard registers are returned as-is. Pseudo-registers
1759 go through their `reg_map'. */
1760 regno = REGNO (orig);
1761 if (regno <= LAST_VIRTUAL_REGISTER
1762 || (map->integrating
1763 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1765 /* Some hard registers are also mapped,
1766 but others are not translated. */
1767 if (map->reg_map[regno] != 0
1768 /* We shouldn't usually have reg_map set for the return
1769 register, but it may happen if we have leaf-register
1770 remapping and the return register is used in one of
1771 the calling sequences of a call_placeholder. In this
1772 case, we'll end up with a reg_map set for this
1773 register, but we don't want to use it for registers
1774 marked as return values. */
1775 && ! REG_FUNCTION_VALUE_P (orig))
1776 return map->reg_map[regno];
1778 /* If this is the virtual frame pointer, make space in current
1779 function's stack frame for the stack frame of the inline function.
1781 Copy the address of this area into a pseudo. Map
1782 virtual_stack_vars_rtx to this pseudo and set up a constant
1783 equivalence for it to be the address. This will substitute the
1784 address into insns where it can be substituted and use the new
1785 pseudo where it can't. */
1786 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1788 rtx loc, seq;
1789 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1790 #ifdef FRAME_GROWS_DOWNWARD
1791 int alignment
1792 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1793 / BITS_PER_UNIT);
1795 /* In this case, virtual_stack_vars_rtx points to one byte
1796 higher than the top of the frame area. So make sure we
1797 allocate a big enough chunk to keep the frame pointer
1798 aligned like a real one. */
1799 if (alignment)
1800 size = CEIL_ROUND (size, alignment);
1801 #endif
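/* For instance, when FRAME_GROWS_DOWNWARD, the rounding above turns a
   20-byte inline frame with a needed alignment of 16 bytes into a
   32-byte allocation, so the substitute area can be aligned exactly
   like the real frame.  */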
1802 start_sequence ();
1803 loc = assign_stack_temp (BLKmode, size, 1);
1804 loc = XEXP (loc, 0);
1805 #ifdef FRAME_GROWS_DOWNWARD
1806 /* In this case, virtual_stack_vars_rtx points to one byte
1807 higher than the top of the frame area. So compute the offset
1808 to one byte higher than our substitute frame. */
1809 loc = plus_constant (loc, size);
1810 #endif
1811 map->reg_map[regno] = temp
1812 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1814 #ifdef STACK_BOUNDARY
1815 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1816 #endif
1818 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1820 seq = gen_sequence ();
1821 end_sequence ();
1822 emit_insn_after (seq, map->insns_at_start);
1823 return temp;
1825 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1826 || (map->integrating
1827 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1828 == orig)))
1830 /* Do the same for a block to contain any arguments referenced
1831 in memory. */
1832 rtx loc, seq;
1833 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1835 start_sequence ();
1836 loc = assign_stack_temp (BLKmode, size, 1);
1837 loc = XEXP (loc, 0);
1838 /* When arguments grow downward, the virtual incoming
1839 args pointer points to the top of the argument block,
1840 so the remapped location had better do the same. */
1841 #ifdef ARGS_GROW_DOWNWARD
1842 loc = plus_constant (loc, size);
1843 #endif
1844 map->reg_map[regno] = temp
1845 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1847 #ifdef STACK_BOUNDARY
1848 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1849 #endif
1851 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1853 seq = gen_sequence ();
1854 end_sequence ();
1855 emit_insn_after (seq, map->insns_at_start);
1856 return temp;
1858 else if (REG_FUNCTION_VALUE_P (orig))
1860 /* This is a reference to the function return value. If
1861 the function doesn't have a return value, error. If the
1862 mode doesn't agree and it isn't BLKmode, make a SUBREG. */
1863 if (map->inline_target == 0)
1865 if (rtx_equal_function_value_matters)
1866 /* This is an ignored return value. We must not
1867 leave it in with REG_FUNCTION_VALUE_P set, since
1868 that would confuse subsequent inlining of the
1869 current function into a later function. */
1870 return gen_rtx_REG (GET_MODE (orig), regno);
1871 else
1872 /* Must be unrolling loops or replicating code if we
1873 reach here, so return the register unchanged. */
1874 return orig;
1876 else if (GET_MODE (map->inline_target) != BLKmode
1877 && mode != GET_MODE (map->inline_target))
1878 return gen_lowpart (mode, map->inline_target);
1879 else
1880 return map->inline_target;
1882 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
1883 /* If leaf_renumber_regs_insn() might remap this register to
1884 some other number, make sure we don't share it with the
1885 inlined function, otherwise delayed optimization of the
1886 inlined function may change it in place, breaking our
1887 reference to it. We may still share it within the
1888 function, so create an entry for this register in the
1889 reg_map. */
1890 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
1891 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
1893 temp = gen_rtx_REG (mode, regno);
1894 map->reg_map[regno] = temp;
1895 return temp;
1897 #endif
1898 else
1899 return orig;
1901 abort ();
1903 if (map->reg_map[regno] == NULL)
1905 map->reg_map[regno] = gen_reg_rtx (mode);
1906 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1907 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1908 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1909 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1911 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1912 mark_reg_pointer (map->reg_map[regno],
1913 map->regno_pointer_align[regno]);
1915 return map->reg_map[regno];
1917 case SUBREG:
1918 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1919 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1920 if (GET_CODE (copy) == SUBREG)
1921 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
1922 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1923 else if (GET_CODE (copy) == CONCAT)
1925 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
1927 if (GET_MODE (retval) == GET_MODE (orig))
1928 return retval;
1929 else
1930 return gen_rtx_SUBREG (GET_MODE (orig), retval,
1931 (SUBREG_WORD (orig) %
1932 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
1933 / (unsigned) UNITS_PER_WORD)));
1935 else
1936 return gen_rtx_SUBREG (GET_MODE (orig), copy,
1937 SUBREG_WORD (orig));
1939 case ADDRESSOF:
1940 copy = gen_rtx_ADDRESSOF (mode,
1941 copy_rtx_and_substitute (XEXP (orig, 0),
1942 map, for_lhs),
1943 0, ADDRESSOF_DECL (orig));
1944 regno = ADDRESSOF_REGNO (orig);
1945 if (map->reg_map[regno])
1946 regno = REGNO (map->reg_map[regno]);
1947 else if (regno > LAST_VIRTUAL_REGISTER)
1949 temp = XEXP (orig, 0);
1950 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
1951 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
1952 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
1953 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
1954 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1956 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
1957 mark_reg_pointer (map->reg_map[regno],
1958 map->regno_pointer_align[regno]);
1959 regno = REGNO (map->reg_map[regno]);
1961 ADDRESSOF_REGNO (copy) = regno;
1962 return copy;
1964 case USE:
1965 case CLOBBER:
1966 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1967 to (use foo) if the original insn didn't have a subreg.
1968 Removing the subreg distorts the VAX movstrhi pattern
1969 by changing the mode of an operand. */
1970 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
1971 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1972 copy = SUBREG_REG (copy);
1973 return gen_rtx_fmt_e (code, VOIDmode, copy);
1975 case CODE_LABEL:
1976 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1977 = LABEL_PRESERVE_P (orig);
1978 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
1980 /* We need to handle "deleted" labels that appear in the DECL_RTL
1981 of a LABEL_DECL. */
1982 case NOTE:
1983 if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
1984 return map->insn_map[INSN_UID (orig)];
1985 break;
1987 case LABEL_REF:
1988 copy
1989 = gen_rtx_LABEL_REF
1990 (mode,
1991 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1992 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
1994 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1996 /* The fact that this label was previously nonlocal does not mean
1997 it still is, so we must check if it is within the range of
1998 this function's labels. */
1999 LABEL_REF_NONLOCAL_P (copy)
2000 = (LABEL_REF_NONLOCAL_P (orig)
2001 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2002 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2004 /* If we have made a nonlocal label local, it means that this
2005 inlined call will be referring to our nonlocal goto handler.
2006 So make sure we create one for this block; we normally would
2007 not since this is not otherwise considered a "call". */
2008 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2009 function_call_count++;
2011 return copy;
2013 case PC:
2014 case CC0:
2015 case CONST_INT:
2016 return orig;
2018 case SYMBOL_REF:
2019 /* Symbols which represent the address of a label stored in the constant
2020 pool must be modified to point to a constant pool entry for the
2021 remapped label. Otherwise, symbols are returned unchanged. */
2022 if (CONSTANT_POOL_ADDRESS_P (orig))
2024 struct function *f = inlining ? inlining : cfun;
2025 rtx constant = get_pool_constant_for_function (f, orig);
2026 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2027 if (inlining)
2029 rtx temp = force_const_mem (const_mode,
2030 copy_rtx_and_substitute (constant,
2031 map, 0));
2033 #if 0
2034 /* Legitimizing the address here is incorrect.
2036 Since we had a SYMBOL_REF before, we can assume it is valid
2037 to have one in this position in the insn.
2039 Also, change_address may create new registers. These
2040 registers will not have valid reg_map entries. This can
2041 cause try_constants() to fail because it assumes that all
2042 registers in the rtx have valid reg_map entries, and it may
2043 end up replacing one of these new registers with junk. */
2045 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2046 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2047 #endif
2049 temp = XEXP (temp, 0);
2051 #ifdef POINTERS_EXTEND_UNSIGNED
2052 if (GET_MODE (temp) != GET_MODE (orig))
2053 temp = convert_memory_address (GET_MODE (orig), temp);
2054 #endif
2055 return temp;
2057 else if (GET_CODE (constant) == LABEL_REF)
2058 return XEXP (force_const_mem
2059 (GET_MODE (orig),
2060 copy_rtx_and_substitute (constant, map, for_lhs)),
2061 0);
2063 else if (SYMBOL_REF_NEED_ADJUST (orig))
2065 eif_eh_map = map;
2066 return rethrow_symbol_map (orig,
2067 expand_inline_function_eh_labelmap);
2070 return orig;
2072 case CONST_DOUBLE:
2073 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2074 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2075 duplicate of a CONST_DOUBLE we have already seen. */
2076 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2078 REAL_VALUE_TYPE d;
2080 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2081 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2083 else
2084 return immed_double_const (CONST_DOUBLE_LOW (orig),
2085 CONST_DOUBLE_HIGH (orig), VOIDmode);
2087 case CONST:
2088 /* Make new constant pool entry for a constant
2089 that was in the pool of the inline function. */
2090 if (RTX_INTEGRATED_P (orig))
2091 abort ();
2092 break;
2094 case ASM_OPERANDS:
2095 /* If a single asm insn contains multiple output operands then
2096 it contains multiple ASM_OPERANDS rtx's that share the input
2097 and constraint vecs. We must make sure that the copied insn
2098 continues to share it. */
2099 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2101 copy = rtx_alloc (ASM_OPERANDS);
2102 copy->volatil = orig->volatil;
2103 PUT_MODE (copy, GET_MODE (orig));
2104 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2105 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2106 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2107 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2108 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2109 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2110 = map->copy_asm_constraints_vector;
2111 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2112 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2113 return copy;
2115 break;
2117 case CALL:
2118 /* This is given special treatment because the first
2119 operand of a CALL is a (MEM ...) which may get
2120 forced into a register for cse. This is undesirable
2121 if function-address cse isn't wanted or if we won't do cse. */
2122 #ifndef NO_FUNCTION_CSE
2123 if (! (optimize && ! flag_no_function_cse))
2124 #endif
2125 return
2126 gen_rtx_CALL
2127 (GET_MODE (orig),
2128 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2129 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2130 map, 0)),
2131 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2132 break;
2134 #if 0
2135 /* Must be ifdefed out for loop unrolling to work. */
2136 case RETURN:
2137 abort ();
2138 #endif
2140 case SET:
2141 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2142 Adjust the setting by the offset of the area we made.
2143 If the nonlocal goto is into the current function,
2144 this will result in unnecessarily bad code, but should work. */
2145 if (SET_DEST (orig) == virtual_stack_vars_rtx
2146 || SET_DEST (orig) == virtual_incoming_args_rtx)
2148 /* In case a translation hasn't occurred already, make one now. */
2149 rtx equiv_reg;
2150 rtx equiv_loc;
2151 HOST_WIDE_INT loc_offset;
2153 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2154 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2155 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2156 REGNO (equiv_reg)).rtx;
2157 loc_offset
2158 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2160 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2161 force_operand
2162 (plus_constant
2163 (copy_rtx_and_substitute (SET_SRC (orig),
2164 map, 0),
2165 - loc_offset),
2166 NULL_RTX));
2168 else
2169 return gen_rtx_SET (VOIDmode,
2170 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2171 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2172 break;
2174 case MEM:
2175 if (inlining
2176 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2177 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2179 enum machine_mode const_mode
2180 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2181 rtx constant
2182 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2184 constant = copy_rtx_and_substitute (constant, map, 0);
2186 /* If this was an address of a constant pool entry that itself
2187 had to be placed in the constant pool, it might not be a
2188 valid address. So the recursive call might have turned it
2189 into a register. In that case, it isn't a constant any
2190 more, so return it. This has the potential of changing a
2191 MEM into a REG, but we'll assume that it is safe. */
2192 if (! CONSTANT_P (constant))
2193 return constant;
2195 return validize_mem (force_const_mem (const_mode, constant));
2198 copy = rtx_alloc (MEM);
2199 PUT_MODE (copy, mode);
2200 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
2201 MEM_COPY_ATTRIBUTES (copy, orig);
2202 return copy;
2204 default:
2205 break;
2208 copy = rtx_alloc (code);
2209 PUT_MODE (copy, mode);
2210 copy->in_struct = orig->in_struct;
2211 copy->volatil = orig->volatil;
2212 copy->unchanging = orig->unchanging;
2214 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2216 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2218 switch (*format_ptr++)
2220 case '0':
2221 /* Copy this through the wide int field; that's safest. */
2222 X0WINT (copy, i) = X0WINT (orig, i);
2223 break;
2225 case 'e':
2226 XEXP (copy, i)
2227 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2228 break;
2230 case 'u':
2231 /* Change any references to old-insns to point to the
2232 corresponding copied insns. */
2233 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2234 break;
2236 case 'E':
2237 XVEC (copy, i) = XVEC (orig, i);
2238 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2240 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2241 for (j = 0; j < XVECLEN (copy, i); j++)
2242 XVECEXP (copy, i, j)
2243 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2244 map, for_lhs);
2246 break;
2248 case 'w':
2249 XWINT (copy, i) = XWINT (orig, i);
2250 break;
2252 case 'i':
2253 XINT (copy, i) = XINT (orig, i);
2254 break;
2256 case 's':
2257 XSTR (copy, i) = XSTR (orig, i);
2258 break;
2260 case 't':
2261 XTREE (copy, i) = XTREE (orig, i);
2262 break;
2264 default:
2265 abort ();
2269 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2271 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2272 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2273 map->copy_asm_constraints_vector
2274 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2277 return copy;
2280 /* Substitute known constant values into INSN, if that is valid. */
2282 void
2283 try_constants (insn, map)
2284 rtx insn;
2285 struct inline_remap *map;
2287 int i;
2289 map->num_sets = 0;
2291 /* First try just updating addresses, then other things. This is
2292 important when we have something like the store of a constant
2293 into memory and we can update the memory address but the machine
2294 does not support a constant source. */
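/* A sketch of why the address-only pass helps (the insn is
   hypothetical): in

   (set (mem:SI (plus:SI (reg:SI 70) (const_int 4))) (reg:SI 71))

   the first pass can rewrite the address using REG 70's known
   equivalent independently of the second pass's attempt to replace
   REG 71 with a constant, which validation may reject if the machine
   has no store-immediate.  */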
2295 subst_constants (&PATTERN (insn), insn, map, 1);
2296 apply_change_group ();
2297 subst_constants (&PATTERN (insn), insn, map, 0);
2298 apply_change_group ();
2300 /* Show we don't know the value of anything stored or clobbered. */
2301 note_stores (PATTERN (insn), mark_stores, NULL);
2302 map->last_pc_value = 0;
2303 #ifdef HAVE_cc0
2304 map->last_cc0_value = 0;
2305 #endif
2307 /* Set up any constant equivalences made in this insn. */
2308 for (i = 0; i < map->num_sets; i++)
2310 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2312 int regno = REGNO (map->equiv_sets[i].dest);
2314 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2315 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2316 /* The following clause is a hack to make the case work where GNU C++
2317 reassigns a variable to make cse work right. */
2318 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2319 regno).rtx,
2320 map->equiv_sets[i].equiv))
2321 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2322 map->equiv_sets[i].equiv, map->const_age);
2324 else if (map->equiv_sets[i].dest == pc_rtx)
2325 map->last_pc_value = map->equiv_sets[i].equiv;
2326 #ifdef HAVE_cc0
2327 else if (map->equiv_sets[i].dest == cc0_rtx)
2328 map->last_cc0_value = map->equiv_sets[i].equiv;
2329 #endif
2333 /* Substitute known constants for pseudo regs in the contents of LOC,
2334 which are part of INSN.
2335 If INSN is zero, the substitution should always be done (this is used to
2336 update DECL_RTL).
2337 These changes are taken out by try_constants if the result is not valid.
2339 Note that we are more concerned with determining when the result of a SET
2340 is a constant, for further propagation, than actually inserting constants
2341 into insns; cse will do the latter task better.
2343 This function is also used to adjust address of items previously addressed
2344 via the virtual stack variable or virtual incoming arguments registers.
2346 If MEMONLY is nonzero, only make changes inside a MEM. */
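/* Example of the basic substitution (pseudo numbers are
   hypothetical): if the const_equiv varray records that pseudo 70
   equals (const_int 42) at a recent enough age, a use of (reg:SI 70)
   is tentatively replaced by (const_int 42) through validate_change;
   the change survives only if the containing insn still matches.  */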
2348 static void
2349 subst_constants (loc, insn, map, memonly)
2350 rtx *loc;
2351 rtx insn;
2352 struct inline_remap *map;
2353 int memonly;
2355 rtx x = *loc;
2356 register int i, j;
2357 register enum rtx_code code;
2358 register const char *format_ptr;
2359 int num_changes = num_validated_changes ();
2360 rtx new = 0;
2361 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2363 code = GET_CODE (x);
2365 switch (code)
2367 case PC:
2368 case CONST_INT:
2369 case CONST_DOUBLE:
2370 case SYMBOL_REF:
2371 case CONST:
2372 case LABEL_REF:
2373 case ADDRESS:
2374 return;
2376 #ifdef HAVE_cc0
2377 case CC0:
2378 if (! memonly)
2379 validate_change (insn, loc, map->last_cc0_value, 1);
2380 return;
2381 #endif
2383 case USE:
2384 case CLOBBER:
2385 /* The only thing we can do with a USE or CLOBBER is possibly do
2386 some substitutions in a MEM within it. */
2387 if (GET_CODE (XEXP (x, 0)) == MEM)
2388 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2389 return;
2391 case REG:
2392 /* Substitute for parms and known constants. Don't replace
2393 hard regs used as user variables with constants. */
2394 if (! memonly)
2396 int regno = REGNO (x);
2397 struct const_equiv_data *p;
2399 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2400 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2401 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2402 p->rtx != 0)
2403 && p->age >= map->const_age)
2404 validate_change (insn, loc, p->rtx, 1);
2406 return;
2408 case SUBREG:
2409 /* SUBREG applied to something other than a reg
2410 should be treated as ordinary, since that must
2411 be a special hack and we don't know how to treat it specially.
2412 Consider for example mulsidi3 in m68k.md.
2413 Ordinary SUBREG of a REG needs this special treatment. */
2414 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2416 rtx inner = SUBREG_REG (x);
2417 rtx new = 0;
2419 /* We can't call subst_constants on &SUBREG_REG (x) because any
2420 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2421 see what is inside, try to form the new SUBREG and see if that is
2422 valid. We handle two cases: extracting a full word in an
2423 integral mode and extracting the low part. */
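/* Worked instance of the second case (a sketch; little-endian,
   32-bit target assumed): if INNER folds to (const_int 300), then
   for (subreg:QI (reg:SI 71) 0) the gen_lowpart_common path below
   yields (const_int 44), the low byte of 300.  */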
2424 subst_constants (&inner, NULL_RTX, map, 0);
2426 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2427 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2428 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2429 new = operand_subword (inner, SUBREG_WORD (x), 0,
2430 GET_MODE (SUBREG_REG (x)));
2432 cancel_changes (num_changes);
2433 if (new == 0 && subreg_lowpart_p (x))
2434 new = gen_lowpart_common (GET_MODE (x), inner);
2436 if (new)
2437 validate_change (insn, loc, new, 1);
2439 return;
2441 break;
2443 case MEM:
2444 subst_constants (&XEXP (x, 0), insn, map, 0);
2446 /* If a memory address got spoiled, change it back. */
2447 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2448 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2449 cancel_changes (num_changes);
2450 return;
2452 case SET:
2454 /* Substitute constants in our source, and in any arguments to a
2455 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2456 itself. */
2457 rtx *dest_loc = &SET_DEST (x);
2458 rtx dest = *dest_loc;
2459 rtx src, tem;
2460 enum machine_mode compare_mode = VOIDmode;
2462 /* If SET_SRC is a COMPARE which subst_constants would turn into
2463 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2464 is to be done. */
2465 if (GET_CODE (SET_SRC (x)) == COMPARE)
2467 src = SET_SRC (x);
2468 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2469 #ifdef HAVE_cc0
2470 || dest == cc0_rtx
2471 #endif
2474 compare_mode = GET_MODE (XEXP (src, 0));
2475 if (compare_mode == VOIDmode)
2476 compare_mode = GET_MODE (XEXP (src, 1));
2480 subst_constants (&SET_SRC (x), insn, map, memonly);
2481 src = SET_SRC (x);
2483 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2484 || GET_CODE (*dest_loc) == SUBREG
2485 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2487 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2489 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2490 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2492 dest_loc = &XEXP (*dest_loc, 0);
2495 /* Do substitute in the address of a destination in memory. */
2496 if (GET_CODE (*dest_loc) == MEM)
2497 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2499 /* Check for the case where DEST is a SUBREG, both it and the underlying
2500 register are no wider than one word, and the SUBREG has the wider mode.
2501 In that case, we are really setting the underlying register to the
2502 source converted to the mode of DEST. So indicate that. */
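/* Hypothetical instance, on a 32-bit target: for
   (set (subreg:SI (reg:HI 72) 0) (const_int 70000)), the local DEST
   and SRC used for recording become (reg:HI 72) and its lowpart
   (const_int 4464), so the equivalence describes the underlying
   register in its own mode.  */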
2503 if (GET_CODE (dest) == SUBREG
2504 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2505 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2506 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2507 <= GET_MODE_SIZE (GET_MODE (dest)))
2508 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2509 src)))
2510 src = tem, dest = SUBREG_REG (dest);
2512 /* If storing a recognizable value, save it for later recording. */
2513 if ((map->num_sets < MAX_RECOG_OPERANDS)
2514 && (CONSTANT_P (src)
2515 || (GET_CODE (src) == REG
2516 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2517 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2518 || (GET_CODE (src) == PLUS
2519 && GET_CODE (XEXP (src, 0)) == REG
2520 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2521 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2522 && CONSTANT_P (XEXP (src, 1)))
2523 || GET_CODE (src) == COMPARE
2524 #ifdef HAVE_cc0
2525 || dest == cc0_rtx
2526 #endif
2527 || (dest == pc_rtx
2528 && (src == pc_rtx || GET_CODE (src) == RETURN
2529 || GET_CODE (src) == LABEL_REF))))
2531 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2532 it will cause us to save the COMPARE with any constants
2533 substituted, which is what we want for later. */
2534 rtx src_copy = copy_rtx (src);
2535 map->equiv_sets[map->num_sets].equiv = src_copy;
2536 map->equiv_sets[map->num_sets++].dest = dest;
2537 if (compare_mode != VOIDmode
2538 && GET_CODE (src) == COMPARE
2539 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2540 #ifdef HAVE_cc0
2541 || dest == cc0_rtx
2542 #endif
2544 && GET_MODE (XEXP (src, 0)) == VOIDmode
2545 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2547 map->compare_src = src_copy;
2548 map->compare_mode = compare_mode;
2552 return;
2554 default:
2555 break;
2558 format_ptr = GET_RTX_FORMAT (code);
2560 /* If the first operand is an expression, save its mode for later. */
2561 if (*format_ptr == 'e')
2562 op0_mode = GET_MODE (XEXP (x, 0));
2564 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2566 switch (*format_ptr++)
2568 case '0':
2569 break;
2571 case 'e':
2572 if (XEXP (x, i))
2573 subst_constants (&XEXP (x, i), insn, map, memonly);
2574 break;
2576 case 'u':
2577 case 'i':
2578 case 's':
2579 case 'w':
2580 case 'n':
2581 case 't':
2582 break;
2584 case 'E':
2585 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2586 for (j = 0; j < XVECLEN (x, i); j++)
2587 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2589 break;
2591 default:
2592 abort ();
2596 /* If this is a commutative operation, move a constant to the second
2597 operand unless the second operand is already a CONST_INT. */
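/* E.g. (plus:SI (const_int 4) (reg:SI 70)) becomes
   (plus:SI (reg:SI 70) (const_int 4)), the canonical ordering that
   recognition and cse expect.  */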
2598 if (! memonly
2599 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2600 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2602 rtx tem = XEXP (x, 0);
2603 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2604 validate_change (insn, &XEXP (x, 1), tem, 1);
2607 /* Simplify the expression in case we put in some constants. */
2608 if (! memonly)
2609 switch (GET_RTX_CLASS (code))
2611 case '1':
2612 if (op0_mode == MAX_MACHINE_MODE)
2613 abort ();
2614 new = simplify_unary_operation (code, GET_MODE (x),
2615 XEXP (x, 0), op0_mode);
2616 break;
2618 case '<':
2620 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2622 if (op_mode == VOIDmode)
2623 op_mode = GET_MODE (XEXP (x, 1));
2624 new = simplify_relational_operation (code, op_mode,
2625 XEXP (x, 0), XEXP (x, 1));
2626 #ifdef FLOAT_STORE_FLAG_VALUE
2627 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2629 enum machine_mode mode = GET_MODE (x);
2630 if (new == const0_rtx)
2631 new = CONST0_RTX (mode);
2632 else
2634 REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
2635 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2638 #endif
2639 break;
2642 case '2':
2643 case 'c':
2644 new = simplify_binary_operation (code, GET_MODE (x),
2645 XEXP (x, 0), XEXP (x, 1));
2646 break;
2648 case 'b':
2649 case '3':
2650 if (op0_mode == MAX_MACHINE_MODE)
2651 abort ();
2653 if (code == IF_THEN_ELSE)
2655 rtx op0 = XEXP (x, 0);
2657 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2658 && GET_MODE (op0) == VOIDmode
2659 && ! side_effects_p (op0)
2660 && XEXP (op0, 0) == map->compare_src
2661 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2663 /* We have a compare of two VOIDmode constants for which
2664 we recorded the comparison mode. */
2665 rtx temp =
2666 simplify_relational_operation (GET_CODE (op0),
2667 map->compare_mode,
2668 XEXP (op0, 0),
2669 XEXP (op0, 1));
2671 if (temp == const0_rtx)
2672 new = XEXP (x, 2);
2673 else if (temp == const1_rtx)
2674 new = XEXP (x, 1);
2677 if (!new)
2678 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2679 XEXP (x, 0), XEXP (x, 1),
2680 XEXP (x, 2));
2681 break;
2684 if (new)
2685 validate_change (insn, loc, new, 1);
2688 /* Show that the registers modified no longer contain known constants. We are
2689 called from note_stores with parts of the new insn. */
2691 static void
2692 mark_stores (dest, x, data)
2693 rtx dest;
2694 rtx x ATTRIBUTE_UNUSED;
2695 void *data ATTRIBUTE_UNUSED;
2697 int regno = -1;
2698 enum machine_mode mode = VOIDmode;
2700 /* DEST is always the innermost thing set, except in the case of
2701 SUBREGs of hard registers. */
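/* For instance (hypothetical, 32-bit words): a store to
   (subreg:SI (reg:DI 2) 0) arrives here with DEST being the SUBREG,
   and the code below invalidates hard regs 2 and 3, the span given
   by HARD_REGNO_NREGS for DImode.  */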
2703 if (GET_CODE (dest) == REG)
2704 regno = REGNO (dest), mode = GET_MODE (dest);
2705 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2707 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2708 mode = GET_MODE (SUBREG_REG (dest));
2711 if (regno >= 0)
2713 unsigned int uregno = regno;
2714 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2715 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2716 unsigned int i;
2718 /* Ignore virtual stack var or virtual arg register since those
2719 are handled separately. */
2720 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2721 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2722 for (i = uregno; i <= last_reg; i++)
2723 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2724 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2728 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2729 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2730 that it points to the node itself, thus indicating that the node is its
2731 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2732 the given node is NULL, recursively descend the decl/block tree of
2733 which it is the root, and for each other ..._DECL or BLOCK node contained
2734 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2735 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2736 values to point to themselves. */
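/* Concretely (a sketch): for a function F whose DECL_INITIAL is a
   BLOCK containing VAR_DECL X, set_decl_origin_self (F) leaves
   DECL_ABSTRACT_ORIGIN (F) == F, the block pointing at itself, and
   DECL_ABSTRACT_ORIGIN (X) == X, marking each as its own abstract
   instance for the debug output.  */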
2738 static void
2739 set_block_origin_self (stmt)
2740 register tree stmt;
2742 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2744 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2747 register tree local_decl;
2749 for (local_decl = BLOCK_VARS (stmt);
2750 local_decl != NULL_TREE;
2751 local_decl = TREE_CHAIN (local_decl))
2752 set_decl_origin_self (local_decl); /* Potential recursion. */
2756 register tree subblock;
2758 for (subblock = BLOCK_SUBBLOCKS (stmt);
2759 subblock != NULL_TREE;
2760 subblock = BLOCK_CHAIN (subblock))
2761 set_block_origin_self (subblock); /* Recurse. */
2766 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2767 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2768 node so that it points to the node itself, thus indicating that the
2769 node represents its own (abstract) origin. Additionally, if the
2770 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2771 the decl/block tree of which the given node is the root, and for
2772 each other ..._DECL or BLOCK node contained therein whose
2773 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2774 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2775 point to themselves. */
2777 void
2778 set_decl_origin_self (decl)
2779 register tree decl;
2781 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2783 DECL_ABSTRACT_ORIGIN (decl) = decl;
2784 if (TREE_CODE (decl) == FUNCTION_DECL)
2786 register tree arg;
2788 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2789 DECL_ABSTRACT_ORIGIN (arg) = arg;
2790 if (DECL_INITIAL (decl) != NULL_TREE
2791 && DECL_INITIAL (decl) != error_mark_node)
2792 set_block_origin_self (DECL_INITIAL (decl));
2797 /* Given a pointer to some BLOCK node, and a boolean value to set the
2798 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2799 the given block, and for all local decls and all local sub-blocks
2800 (recursively) which are contained therein. */
2802 static void
2803 set_block_abstract_flags (stmt, setting)
2804 register tree stmt;
2805 register int setting;
2807 register tree local_decl;
2808 register tree subblock;
2810 BLOCK_ABSTRACT (stmt) = setting;
2812 for (local_decl = BLOCK_VARS (stmt);
2813 local_decl != NULL_TREE;
2814 local_decl = TREE_CHAIN (local_decl))
2815 set_decl_abstract_flags (local_decl, setting);
2817 for (subblock = BLOCK_SUBBLOCKS (stmt);
2818 subblock != NULL_TREE;
2819 subblock = BLOCK_CHAIN (subblock))
2820 set_block_abstract_flags (subblock, setting);
2823 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2824 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2825 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2826 set the abstract flags for all of the parameters, local vars, local
2827 blocks and sub-blocks (recursively) to the same setting. */
2829 void
2830 set_decl_abstract_flags (decl, setting)
2831 register tree decl;
2832 register int setting;
2834 DECL_ABSTRACT (decl) = setting;
2835 if (TREE_CODE (decl) == FUNCTION_DECL)
2837 register tree arg;
2839 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2840 DECL_ABSTRACT (arg) = setting;
2841 if (DECL_INITIAL (decl) != NULL_TREE
2842 && DECL_INITIAL (decl) != error_mark_node)
2843 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2847 /* Output the assembly language code for the function FNDECL
2848 from its DECL_SAVED_INSNS. Used for inline functions that are output
2849 at the end of compilation instead of where they appeared in the source. */
2851 void
2852 output_inline_function (fndecl)
2853 tree fndecl;
2855 struct function *old_cfun = cfun;
2856 enum debug_info_type old_write_symbols = write_symbols;
2857 struct function *f = DECL_SAVED_INSNS (fndecl);
2859 cfun = f;
2860 current_function_decl = fndecl;
2861 clear_emit_caches ();
2863 set_new_last_label_num (f->inl_max_label_num);
2865 /* We're not deferring this any longer. */
2866 DECL_DEFER_OUTPUT (fndecl) = 0;
2868 /* If requested, suppress debugging information. */
2869 if (f->no_debugging_symbols)
2870 write_symbols = NO_DEBUG;
2872 /* Do any preparation, such as emitting abstract debug info for the inline
2873 before it gets mangled by optimization. */
2874 note_outlining_of_inline_function (fndecl);
2876 /* Compile this function all the way down to assembly code. */
2877 rest_of_compilation (fndecl);
2879 /* We can't inline this anymore. */
2880 f->inlinable = 0;
2881 DECL_INLINE (fndecl) = 0;
2883 cfun = old_cfun;
2884 current_function_decl = old_cfun ? old_cfun->decl : 0;
2885 write_symbols = old_write_symbols;