gcc/integrate.c
1 /* Procedure integration for GCC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tm_p.h"
31 #include "regs.h"
32 #include "flags.h"
33 #include "debug.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "output.h"
37 #include "recog.h"
38 #include "integrate.h"
39 #include "real.h"
40 #include "except.h"
41 #include "function.h"
42 #include "toplev.h"
43 #include "intl.h"
44 #include "loop.h"
45 #include "params.h"
46 #include "ggc.h"
47 #include "target.h"
48 #include "langhooks.h"
50 /* Round VALUE up to the next highest integer that meets the
51    alignment.  */
52 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
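/* Editorial worked example (not in the original source): CEIL_ROUND
   rounds VALUE up to the next multiple of ALIGN, assuming ALIGN is a
   power of two.  For VALUE = 37 and ALIGN = 8:

     (37 + 8 - 1) & ~(8 - 1)  ==  44 & ~7  ==  40

   which is the smallest multiple of 8 not less than 37.  The mask
   trick relies on ALIGN being a power of two.  */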
54 /* Default max number of insns a function can have and still be inline.
55 This is overridden on RISC machines. */
56 #ifndef INTEGRATE_THRESHOLD
57 /* Inlining small functions might save more space than not inlining at
58    all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
59 #define INTEGRATE_THRESHOLD(DECL) \
60 (optimize_size \
61 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
62 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
63 #endif
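/* Editorial worked example: for a function with two arguments,
   list_length (DECL_ARGUMENTS (DECL)) is 2, so the threshold is

     optimize_size:  1 + (3 * 2) / 2  ==  4 insns
     otherwise:      8 * (8 + 2)      ==  80 insns

   i.e. only very small functions qualify when optimizing for size,
   while the normal limit grows with the argument count.  */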
66 /* Private type used by {get/has}_func_hard_reg_initial_val. */
67 typedef struct initial_value_pair GTY(()) {
68 rtx hard_reg;
69 rtx pseudo;
70 } initial_value_pair;
71 typedef struct initial_value_struct GTY(()) {
72 int num_entries;
73 int max_entries;
74 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
75 } initial_value_struct;
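/* Editorial sketch, assuming the accessors declared in integrate.h:
   each entry pairs a hard register with the pseudo that holds its
   value on entry to the function.  A backend might request such a
   pseudo roughly like this (REGNO_FOO is a hypothetical target
   register number):

     rtx entry_val = get_hard_reg_initial_val (Pmode, REGNO_FOO);

   The pair is recorded in this table so that a later pass can emit
   the copy from the hard register into the pseudo at function
   entry.  */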
77 static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));
79 static rtvec initialize_for_inline PARAMS ((tree));
80 static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
81 static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
82 rtvec));
83 static tree integrate_decl_tree PARAMS ((tree,
84 struct inline_remap *));
85 static void subst_constants PARAMS ((rtx *, rtx,
86 struct inline_remap *, int));
87 static void set_block_origin_self PARAMS ((tree));
88 static void set_block_abstract_flags PARAMS ((tree, int));
89 static void process_reg_param PARAMS ((struct inline_remap *, rtx,
90 rtx));
91 static void mark_stores PARAMS ((rtx, rtx, void *));
92 static void save_parm_insns PARAMS ((rtx, rtx));
93 static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
94 rtx));
95 static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
96 int));
97 static int compare_blocks PARAMS ((const void *, const void *));
98 static int find_block PARAMS ((const void *, const void *));
100 /* Used by copy_rtx_and_substitute; this indicates whether the function is
101 called for the purpose of inlining or some other purpose (e.g. loop
102 unrolling).  This affects how constant pool references are handled.
103 This variable points to the struct function of the function being inlined.  */
104 static struct function *inlining = 0;
106 /* Returns the Ith entry in the label_map contained in MAP. If the
107 Ith entry has not yet been set, return a fresh label. This function
108 performs a lazy initialization of label_map, thereby avoiding huge memory
109 explosions when the label_map gets very large. */
111 rtx
112 get_label_from_map (map, i)
113 struct inline_remap *map;
114 int i;
116 rtx x = map->label_map[i];
118 if (x == NULL_RTX)
119 x = map->label_map[i] = gen_label_rtx ();
121 return x;
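/* Editorial usage sketch: when remapping insns, labels are created
   only on first reference, e.g.

     rtx new_label = get_label_from_map (map, CODE_LABEL_NUMBER (old));
     emit_label (new_label);

   A second call with the same index returns the same rtx, so forward
   references and the label's eventual definition stay consistent
   (this is exactly how copy_insn_list uses it below).  */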
124 /* Return false if the function FNDECL cannot be inlined on account of its
125 attributes, true otherwise. */
126 bool
127 function_attribute_inlinable_p (fndecl)
128 tree fndecl;
130 if (targetm.attribute_table)
132 tree a;
134 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
136 tree name = TREE_PURPOSE (a);
137 int i;
139 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
140 if (is_attribute_p (targetm.attribute_table[i].name, name))
141 return (*targetm.function_attribute_inlinable_p) (fndecl);
145 return true;
148 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
149 is safe and reasonable to integrate into other functions.
150 Nonzero means value is a warning msgid with a single %s
151 for the function's name. */
153 const char *
154 function_cannot_inline_p (fndecl)
155 tree fndecl;
157 rtx insn;
158 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
160 /* For functions marked as inline increase the maximum size to
161 MAX_INLINE_INSNS_RTL (--param max-inline-insns-rtl=<n>).  For
162 regular functions use the limit given by INTEGRATE_THRESHOLD.
163 Note that the RTL inliner is not used by the languages that use
164 the tree inliner (C, C++). */
166 int max_insns = (DECL_INLINE (fndecl))
167 ? (MAX_INLINE_INSNS_RTL
168 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
169 : INTEGRATE_THRESHOLD (fndecl);
171 int ninsns = 0;
172 tree parms;
174 if (DECL_UNINLINABLE (fndecl))
175 return N_("function cannot be inline");
177 /* No inlines with varargs. */
178 if (last && TREE_VALUE (last) != void_type_node)
179 return N_("varargs function cannot be inline");
181 if (current_function_calls_alloca)
182 return N_("function using alloca cannot be inline");
184 if (current_function_calls_setjmp)
185 return N_("function using setjmp cannot be inline");
187 if (current_function_calls_eh_return)
188 return N_("function uses __builtin_eh_return");
190 if (current_function_contains_functions)
191 return N_("function with nested functions cannot be inline");
193 if (forced_labels)
194 return
195 N_("function with label addresses used in initializers cannot inline");
197 if (current_function_cannot_inline)
198 return current_function_cannot_inline;
200 /* If it's not even close, don't even look.  */
201 if (get_max_uid () > 3 * max_insns)
202 return N_("function too large to be inline");
204 #if 0
205 /* Don't inline functions which do not specify a function prototype and
206 have BLKmode argument or take the address of a parameter. */
207 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
209 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
210 TREE_ADDRESSABLE (parms) = 1;
211 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
212 return N_("no prototype, and parameter address used; cannot be inline");
214 #endif
216 /* We can't inline functions that return structures
217 the old-fashioned PCC way, copying into a static block. */
218 if (current_function_returns_pcc_struct)
219 return N_("inline functions not supported for this return value type");
221 /* We can't inline functions that return structures of varying size. */
222 if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
223 && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
224 return N_("function with varying-size return value cannot be inline");
226 /* Cannot inline a function with a varying size argument or one that
227 receives a transparent union. */
228 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
230 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
231 return N_("function with varying-size parameter cannot be inline");
232 else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
233 && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
234 return N_("function with transparent union parameter cannot be inline");
237 if (get_max_uid () > max_insns)
239 for (ninsns = 0, insn = get_first_nonparm_insn ();
240 insn && ninsns < max_insns;
241 insn = NEXT_INSN (insn))
242 if (INSN_P (insn))
243 ninsns++;
245 if (ninsns >= max_insns)
246 return N_("function too large to be inline");
249 /* We will not inline a function which uses computed goto. The addresses of
250 its local labels, which may be tucked into global storage, are of course
251 not constant across instantiations, which causes unexpected behavior. */
252 if (current_function_has_computed_jump)
253 return N_("function with computed jump cannot inline");
255 /* We cannot inline a nested function that jumps to a nonlocal label. */
256 if (current_function_has_nonlocal_goto)
257 return N_("function with nonlocal goto cannot be inline");
259 /* We can't inline functions that return a PARALLEL rtx. */
260 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
262 rtx result = DECL_RTL (DECL_RESULT (fndecl));
263 if (GET_CODE (result) == PARALLEL)
264 return N_("inline functions not supported for this return value type");
267 /* If the function has a target specific attribute attached to it,
268 then we assume that we should not inline it. This can be overridden
269 by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P. */
270 if (!function_attribute_inlinable_p (fndecl))
271 return N_("function with target specific attribute(s) cannot be inlined");
273 return NULL;
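/* Editorial caller-side sketch (a plausible pattern, not a quote of
   the real caller; exact conditions vary):

     const char *lose = function_cannot_inline_p (fndecl);
     if (lose && warn_inline && DECL_INLINE (fndecl))
       warning_with_decl (fndecl, lose);

   The single %s in the returned msgid is filled in with the
   function's name by the diagnostic routine.  */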
276 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
277 Zero for a reg that isn't a parm's home.
278 Only reg numbers less than max_parm_reg are mapped here. */
279 static tree *parmdecl_map;
281 /* In save_for_inline, nonzero if past the parm-initialization insns. */
282 static int in_nonparm_insns;
284 /* Subroutine for `save_for_inline'. Performs initialization
285 needed to save FNDECL's insns and info for future inline expansion. */
287 static rtvec
288 initialize_for_inline (fndecl)
289 tree fndecl;
291 int i;
292 rtvec arg_vector;
293 tree parms;
295 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
296 memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
297 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
299 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
300 parms;
301 parms = TREE_CHAIN (parms), i++)
303 rtx p = DECL_RTL (parms);
305 /* If we have (mem (addressof (mem ...))), use the inner MEM because
306    otherwise the copy_rtx call below will not unshare the MEM, since
307    it shares ADDRESSOF.  */
308 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
309 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
310 p = XEXP (XEXP (p, 0), 0);
312 RTVEC_ELT (arg_vector, i) = p;
314 if (GET_CODE (p) == REG)
315 parmdecl_map[REGNO (p)] = parms;
316 else if (GET_CODE (p) == CONCAT)
318 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
319 rtx pimag = gen_imagpart (GET_MODE (preal), p);
321 if (GET_CODE (preal) == REG)
322 parmdecl_map[REGNO (preal)] = parms;
323 if (GET_CODE (pimag) == REG)
324 parmdecl_map[REGNO (pimag)] = parms;
327 /* This flag is cleared later
328 if the function ever modifies the value of the parm. */
329 TREE_READONLY (parms) = 1;
332 return arg_vector;
335 /* Copy NODE (which must be a DECL, but not a PARM_DECL). The DECL
336 originally was in the FROM_FN, but now it will be in the
337 TO_FN. */
339 tree
340 copy_decl_for_inlining (decl, from_fn, to_fn)
341 tree decl;
342 tree from_fn;
343 tree to_fn;
345 tree copy;
347 /* Copy the declaration. */
348 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
350 tree type;
351 int invisiref = 0;
353 /* See if the frontend wants to pass this by invisible reference. */
354 if (TREE_CODE (decl) == PARM_DECL
355 && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
356 && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
357 && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
359 invisiref = 1;
360 type = DECL_ARG_TYPE (decl);
362 else
363 type = TREE_TYPE (decl);
365 /* For a parameter, we must make an equivalent VAR_DECL, not a
366 new PARM_DECL. */
367 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
368 if (!invisiref)
370 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
371 TREE_READONLY (copy) = TREE_READONLY (decl);
372 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
374 else
376 TREE_ADDRESSABLE (copy) = 0;
377 TREE_READONLY (copy) = 1;
378 TREE_THIS_VOLATILE (copy) = 0;
381 else
383 copy = copy_node (decl);
384 (*lang_hooks.dup_lang_specific_decl) (copy);
386 /* TREE_ADDRESSABLE isn't used to indicate that a label's
387 address has been taken; it's for internal bookkeeping in
388 expand_goto_internal. */
389 if (TREE_CODE (copy) == LABEL_DECL)
390 TREE_ADDRESSABLE (copy) = 0;
393 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
394 declaration inspired this copy. */
395 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
397 /* The new variable/label has no RTL, yet. */
398 if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
399 SET_DECL_RTL (copy, NULL_RTX);
401 /* These args would always appear unused, if not for this. */
402 TREE_USED (copy) = 1;
404 /* Set the context for the new declaration. */
405 if (!DECL_CONTEXT (decl))
406 /* Globals stay global. */
408 else if (DECL_CONTEXT (decl) != from_fn)
409 /* Things that weren't in the scope of the function we're inlining
410 from aren't in the scope we're inlining to, either. */
412 else if (TREE_STATIC (decl))
413 /* Function-scoped static variables should stay in the original
414 function. */
416 else
417 /* Ordinary automatic local variables are now in the scope of the
418 new function. */
419 DECL_CONTEXT (copy) = to_fn;
421 return copy;
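/* Editorial example: given a PARM_DECL whose TREE_TYPE is
   `struct big' but whose DECL_ARG_TYPE is `struct big *', the test
   above detects an invisible reference, so

     tree copy = copy_decl_for_inlining (parm, from_fn, to_fn);

   yields a VAR_DECL of pointer type that is TREE_READONLY and not
   TREE_ADDRESSABLE: the pointer itself is never modified or
   address-taken even when the referenced object is.  */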
424 /* Make the insns and PARM_DECLs of the current function permanent
425 and record other information in DECL_SAVED_INSNS to allow inlining
426 of this function in subsequent calls.
428 This routine need not copy any insns because we are not going
429 to immediately compile the insns in the insn chain. There
430 are two cases when we would compile the insns for FNDECL:
431 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
432 be output at the end of other compilation, because somebody took
433 its address. In the first case, the insns of FNDECL are copied
434 as it is expanded inline, so FNDECL's saved insns are not
435 modified. In the second case, FNDECL is used for the last time,
436 so modifying the rtl is not a problem.
438 We don't have to worry about FNDECL being inline expanded by
439 other functions which are written at the end of compilation
440 because flag_no_inline is turned on when we begin writing
441 functions at the end of compilation. */
443 void
444 save_for_inline (fndecl)
445 tree fndecl;
447 rtx insn;
448 rtvec argvec;
449 rtx first_nonparm_insn;
451 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
452 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
453 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
454 for the parms, prior to elimination of virtual registers.
455 These values are needed for substituting parms properly. */
456 if (! flag_no_inline)
457 parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
459 /* Make and emit a return-label if we have not already done so. */
461 if (return_label == 0)
463 return_label = gen_label_rtx ();
464 emit_label (return_label);
467 if (! flag_no_inline)
468 argvec = initialize_for_inline (fndecl);
469 else
470 argvec = NULL;
472 /* Delete basic block notes created by an early run of find_basic_blocks.
473    The notes would later be used by find_basic_blocks to reuse the memory
474    for basic_block structures on an already freed obstack.  */
475 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
476 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
477 delete_related_insns (insn);
479 /* If there are insns that copy parms from the stack into pseudo registers,
480 those insns are not copied. `expand_inline_function' must
481 emit the correct code to handle such things. */
483 insn = get_insns ();
484 if (GET_CODE (insn) != NOTE)
485 abort ();
487 if (! flag_no_inline)
489 /* Get the insn which signals the end of parameter setup code. */
490 first_nonparm_insn = get_first_nonparm_insn ();
492 /* Now just scan the chain of insns to see what happens to our
493 PARM_DECLs. If a PARM_DECL is used but never modified, we
494 can substitute its rtl directly when expanding inline (and
495 perform constant folding when its incoming value is
496 constant). Otherwise, we have to copy its value into a new
497 register and track the new register's life. */
498 in_nonparm_insns = 0;
499 save_parm_insns (insn, first_nonparm_insn);
501 cfun->inl_max_label_num = max_label_num ();
502 cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
503 cfun->original_arg_vector = argvec;
505 cfun->original_decl_initial = DECL_INITIAL (fndecl);
506 cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
507 DECL_SAVED_INSNS (fndecl) = cfun;
509 /* Clean up. */
510 if (! flag_no_inline)
511 free (parmdecl_map);
514 /* Scan the chain of insns to see what happens to our PARM_DECLs. If a
515 PARM_DECL is used but never modified, we can substitute its rtl directly
516 when expanding inline (and perform constant folding when its incoming
517 value is constant). Otherwise, we have to copy its value into a new
518 register and track the new register's life. */
520 static void
521 save_parm_insns (insn, first_nonparm_insn)
522 rtx insn;
523 rtx first_nonparm_insn;
525 if (insn == NULL_RTX)
526 return;
528 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
530 if (insn == first_nonparm_insn)
531 in_nonparm_insns = 1;
533 if (INSN_P (insn))
535 /* Record what interesting things happen to our parameters. */
536 note_stores (PATTERN (insn), note_modified_parmregs, NULL);
538 /* If this is a CALL_PLACEHOLDER insn then we need to look into the
539 three attached sequences: normal call, sibling call and tail
540 recursion. */
541 if (GET_CODE (insn) == CALL_INSN
542 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
544 int i;
546 for (i = 0; i < 3; i++)
547 save_parm_insns (XEXP (PATTERN (insn), i),
548 first_nonparm_insn);
554 /* Note whether a parameter is modified or not. */
556 static void
557 note_modified_parmregs (reg, x, data)
558 rtx reg;
559 rtx x ATTRIBUTE_UNUSED;
560 void *data ATTRIBUTE_UNUSED;
562 if (GET_CODE (reg) == REG && in_nonparm_insns
563 && REGNO (reg) < max_parm_reg
564 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
565 && parmdecl_map[REGNO (reg)] != 0)
566 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
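/* Editorial interplay sketch: save_parm_insns reaches this callback
   through note_stores, which invokes it once per store destination
   in an insn's pattern:

     note_stores (PATTERN (insn), note_modified_parmregs, NULL);

   so any pseudo that is a parameter's home and is written after the
   parameter-setup insns gets its PARM_DECL's TREE_READONLY flag
   cleared.  */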
569 /* Unfortunately, we need a global copy of const_equiv map for communication
570 with a function called from note_stores. Be *very* careful that this
571 is used properly in the presence of recursion. */
573 varray_type global_const_equiv_varray;
575 #define FIXED_BASE_PLUS_P(X) \
576 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
577 && GET_CODE (XEXP (X, 0)) == REG \
578 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
579 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
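/* Editorial example: FIXED_BASE_PLUS_P matches an address of the form

     (plus (reg virtual-stack-vars) (const_int 16))

   i.e. a virtual base register plus a constant offset.  Such a value
   is stable across the inlined body, so it can safely be recorded as
   a constant equivalence below.  */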
581 /* Called to set up a mapping for the case where a parameter is in a
582 register. If it is read-only and our argument is a constant, set up the
583 constant equivalence.
585 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
586 if it is a register.
588 Also, don't allow hard registers here; they might not be valid when
589 substituted into insns. */
590 static void
591 process_reg_param (map, loc, copy)
592 struct inline_remap *map;
593 rtx loc, copy;
595 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
596 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
597 && ! REG_USERVAR_P (copy))
598 || (GET_CODE (copy) == REG
599 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
601 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
602 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
603 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
604 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
605 copy = temp;
607 map->reg_map[REGNO (loc)] = copy;
610 /* Compare two BLOCKs for qsort. The key we sort on is the
611 BLOCK_ABSTRACT_ORIGIN of the blocks.  We cannot simply subtract the
612 two pointers, because the difference may not fit in an int.  */
614 static int
615 compare_blocks (v1, v2)
616 const void *v1;
617 const void *v2;
619 tree b1 = *((const tree *) v1);
620 tree b2 = *((const tree *) v2);
621 char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
622 char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
624 if (p1 == p2)
625 return 0;
626 return p1 < p2 ? -1 : 1;
629 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
630 an original block; the second to a remapped equivalent. */
632 static int
633 find_block (v1, v2)
634 const void *v1;
635 const void *v2;
637 const union tree_node *b1 = (const union tree_node *) v1;
638 tree b2 = *((const tree *) v2);
639 char *p1 = (char *) b1;
640 char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);
642 if (p1 == p2)
643 return 0;
644 return p1 < p2 ? -1 : 1;
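/* Editorial pairing sketch: compare_blocks is the qsort ordering and
   find_block the matching bsearch predicate; both key on
   BLOCK_ABSTRACT_ORIGIN.  expand_inline_function sorts the map and
   copy_insn_list later looks blocks up, roughly:

     qsort (&VARRAY_TREE (map->block_map, 0),
            map->block_map->elements_used, sizeof (tree),
            compare_blocks);

     tree *p = (tree *) bsearch (NOTE_BLOCK (insn),
                                 &VARRAY_TREE (map->block_map, 0),
                                 map->block_map->elements_used,
                                 sizeof (tree), find_block);

   The bsearch key is an original block; the array holds the remapped
   equivalents.  */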
647 /* Integrate the procedure defined by FNDECL. Note that this function
648 may wind up calling itself. Since the static variables are not
649 reentrant, we do not assign them until after the possibility
650 of recursion is eliminated.
652 If IGNORE is nonzero, do not produce a value.
653 Otherwise store the value in TARGET if it is nonzero and that is convenient.
655 Value is:
656 (rtx)-1 if we could not substitute the function
657 0 if we substituted it and it does not produce a value
658 else an rtx for where the value is stored. */
660 rtx
661 expand_inline_function (fndecl, parms, target, ignore, type,
662 structure_value_addr)
663 tree fndecl, parms;
664 rtx target;
665 int ignore;
666 tree type;
667 rtx structure_value_addr;
669 struct function *inlining_previous;
670 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
671 tree formal, actual, block;
672 rtx parm_insns = inl_f->emit->x_first_insn;
673 rtx insns = (inl_f->inl_last_parm_insn
674 ? NEXT_INSN (inl_f->inl_last_parm_insn)
675 : parm_insns);
676 tree *arg_trees;
677 rtx *arg_vals;
678 int max_regno;
679 int i;
680 int min_labelno = inl_f->emit->x_first_label_num;
681 int max_labelno = inl_f->inl_max_label_num;
682 int nargs;
683 rtx loc;
684 rtx stack_save = 0;
685 rtx temp;
686 struct inline_remap *map = 0;
687 rtvec arg_vector = inl_f->original_arg_vector;
688 rtx static_chain_value = 0;
689 int inl_max_uid;
690 int eh_region_offset;
692 /* The pointer used to track the true location of the memory used
693 for MAP->LABEL_MAP. */
694 rtx *real_label_map = 0;
696 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
697 max_regno = inl_f->emit->x_reg_rtx_no + 3;
698 if (max_regno < FIRST_PSEUDO_REGISTER)
699 abort ();
701 /* Pull out the decl for the function definition; fndecl may be a
702 local declaration, which would break DECL_ABSTRACT_ORIGIN. */
703 fndecl = inl_f->decl;
705 nargs = list_length (DECL_ARGUMENTS (fndecl));
707 if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
708 cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;
710 /* Check that the parm types match and that sufficient arguments were
711 passed. Since the appropriate conversions or default promotions have
712 already been applied, the machine modes should match exactly. */
714 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
715 formal;
716 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
718 tree arg;
719 enum machine_mode mode;
721 if (actual == 0)
722 return (rtx) (size_t) -1;
724 arg = TREE_VALUE (actual);
725 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
727 if (arg == error_mark_node
728 || mode != TYPE_MODE (TREE_TYPE (arg))
729 /* If they are block mode, the types should match exactly.
730 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
731 which could happen if the parameter has incomplete type. */
732 || (mode == BLKmode
733 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
734 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
735 return (rtx) (size_t) -1;
738 /* If there is a TARGET which is a readonly BLKmode MEM and DECL_RESULT
739 is also a mem, we are going to lose the readonly on the stores, so don't
740 inline. */
741 if (target != 0 && GET_CODE (target) == MEM && GET_MODE (target) == BLKmode
742 && RTX_UNCHANGING_P (target) && DECL_RTL_SET_P (DECL_RESULT (fndecl))
743 && GET_CODE (DECL_RTL (DECL_RESULT (fndecl))) == MEM)
744 return (rtx) (size_t) -1;
746 /* Extra arguments are valid, but will be ignored below, so we must
747 evaluate them here for side-effects. */
748 for (; actual; actual = TREE_CHAIN (actual))
749 expand_expr (TREE_VALUE (actual), const0_rtx,
750 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
752 /* Expand the function arguments. Do this first so that any
753 new registers get created before we allocate the maps. */
755 arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
756 arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
758 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
759 formal;
760 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
762 /* Actual parameter, converted to the type of the argument within the
763 function. */
764 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
765 /* Mode of the variable used within the function. */
766 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
767 int invisiref = 0;
769 arg_trees[i] = arg;
770 loc = RTVEC_ELT (arg_vector, i);
772 /* If this is an object passed by invisible reference, we copy the
773 object into a stack slot and save its address. If this will go
774 into memory, we do nothing now. Otherwise, we just expand the
775 argument. */
776 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
777 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
779 rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);
781 store_expr (arg, stack_slot, 0);
782 arg_vals[i] = XEXP (stack_slot, 0);
783 invisiref = 1;
785 else if (GET_CODE (loc) != MEM)
787 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
789 int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
790 enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));
792 pmode = promote_mode (TREE_TYPE (formal), pmode,
793 &unsignedp, 0);
795 if (GET_MODE (loc) != pmode)
796 abort ();
798 /* The mode of LOC and ARG can differ if LOC was a variable
799    that had its mode promoted via PROMOTED_MODE.  */
800 arg_vals[i] = convert_modes (pmode,
801 TYPE_MODE (TREE_TYPE (arg)),
802 expand_expr (arg, NULL_RTX, mode,
803 EXPAND_SUM),
804 unsignedp);
806 else
807 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
809 else
810 arg_vals[i] = 0;
812 /* If the formal type was const but the actual was not, we might
813 end up here with an rtx wrongly tagged unchanging in the caller's
814 context. Fix that. */
815 if (arg_vals[i] != 0
816 && (GET_CODE (arg_vals[i]) == REG || GET_CODE (arg_vals[i]) == MEM)
817 && ! TREE_READONLY (TREE_VALUE (actual)))
818 RTX_UNCHANGING_P (arg_vals[i]) = 0;
820 if (arg_vals[i] != 0
821 && (! TREE_READONLY (formal)
822 /* If the parameter is not read-only, copy our argument through
823 a register. Also, we cannot use ARG_VALS[I] if it overlaps
824 TARGET in any way. In the inline function, they will likely
825 be two different pseudos, and `safe_from_p' will make all
826 sorts of smart assumptions about their not conflicting.
827 But if ARG_VALS[I] overlaps TARGET, these assumptions are
828 wrong, so put ARG_VALS[I] into a fresh register.
829 Don't worry about invisible references, since their stack
830 temps will never overlap the target. */
831 || (target != 0
832 && ! invisiref
833 && (GET_CODE (arg_vals[i]) == REG
834 || GET_CODE (arg_vals[i]) == SUBREG
835 || GET_CODE (arg_vals[i]) == MEM)
836 && reg_overlap_mentioned_p (arg_vals[i], target))
837 /* ??? We must always copy a SUBREG into a REG, because it might
838 get substituted into an address, and not all ports correctly
839 handle SUBREGs in addresses. */
840 || (GET_CODE (arg_vals[i]) == SUBREG)))
841 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
843 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
844 && POINTER_TYPE_P (TREE_TYPE (formal)))
845 mark_reg_pointer (arg_vals[i],
846 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
849 /* Allocate the structures we use to remap things. */
851 map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
852 map->fndecl = fndecl;
854 VARRAY_TREE_INIT (map->block_map, 10, "block_map");
855 map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
857 /* We used to use alloca here, but the size of what it would try to
858 allocate would occasionally cause it to exceed the stack limit and
859 cause unpredictable core dumps. */
860 real_label_map
861 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
862 map->label_map = real_label_map;
863 map->local_return_label = NULL_RTX;
865 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
866 map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
867 map->min_insnno = 0;
868 map->max_insnno = inl_max_uid;
870 map->integrating = 1;
871 map->compare_src = NULL_RTX;
872 map->compare_mode = VOIDmode;
874 /* const_equiv_varray maps pseudos in our routine to constants, so
875 it needs to be large enough for all our pseudos. This is the
876 number we are currently using plus the number in the called
877 routine, plus 15 for each arg, five to compute the virtual frame
878 pointer, and five for the return value. This should be enough
879 for most cases. We do not reference entries outside the range of
880 the map.
882 ??? These numbers are quite arbitrary and were obtained by
883 experimentation. At some point, we should try to allocate the
884 table after all the parameters are set up so we can more accurately
885 estimate the number of pseudos we will need. */
887 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
888 (max_reg_num ()
889 + (max_regno - FIRST_PSEUDO_REGISTER)
890 + 15 * nargs
891 + 10),
892 "expand_inline_function");
893 map->const_age = 0;
895 /* Record the current insn in case we have to set up pointers to frame
896 and argument memory blocks. If there are no insns yet, add a dummy
897 insn that can be used as an insertion point. */
898 map->insns_at_start = get_last_insn ();
899 if (map->insns_at_start == 0)
900 map->insns_at_start = emit_note (NOTE_INSN_DELETED);
902 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
903 map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;
905 /* Update the outgoing argument size to allow for those in the inlined
906 function. */
907 if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
908 current_function_outgoing_args_size = inl_f->outgoing_args_size;
910 /* If the inline function needs to make PIC references, that means
911 that this function's PIC offset table must be used. */
912 if (inl_f->uses_pic_offset_table)
913 current_function_uses_pic_offset_table = 1;
915 /* If this function needs a context, set it up. */
916 if (inl_f->needs_context)
917 static_chain_value = lookup_static_chain (fndecl);
919 /* If the inlined function calls __builtin_constant_p, then we'll
920 need to call purge_builtin_constant_p on this function. */
921 if (inl_f->calls_constant_p)
922 current_function_calls_constant_p = 1;
924 if (GET_CODE (parm_insns) == NOTE
925 && NOTE_LINE_NUMBER (parm_insns) > 0)
927 rtx note = emit_line_note (NOTE_SOURCE_FILE (parm_insns),
928 NOTE_LINE_NUMBER (parm_insns));
929 if (note)
930 RTX_INTEGRATED_P (note) = 1;
933 /* Process each argument. For each, set up things so that the function's
934 reference to the argument will refer to the argument being passed.
935 We only replace REG with REG here. Any simplifications are done
936 via const_equiv_map.
938 We make two passes: In the first, we deal with parameters that will
939 be placed into registers, since we need to ensure that the allocated
940 register number fits in const_equiv_map. Then we store all non-register
941 parameters into their memory location. */
943 /* Don't try to free temp stack slots here, because we may put one of the
944 parameters into a temp stack slot. */
946 for (i = 0; i < nargs; i++)
948 rtx copy = arg_vals[i];
950 loc = RTVEC_ELT (arg_vector, i);
952 /* There are three cases, each handled separately. */
953 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
954 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
956 /* This must be an object passed by invisible reference (it could
957 also be a variable-sized object, but we forbid inlining functions
958 with variable-sized arguments). COPY is the address of the
959 actual value (this computation will cause it to be copied). We
960 map that address for the register, noting the actual address as
961 an equivalent in case it can be substituted into the insns. */
963 if (GET_CODE (copy) != REG)
965 temp = copy_addr_to_reg (copy);
966 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
967 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
968 copy = temp;
970 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
972 else if (GET_CODE (loc) == MEM)
974 /* This is the case of a parameter that lives in memory. It
975 will live in the block we allocate in the called routine's
976 frame that simulates the incoming argument area. Do nothing
977 with the parameter now; we will call store_expr later. In
978 this case, however, we must ensure that the virtual stack and
979 incoming arg rtx values are expanded now so that we can be
980 sure we have enough slots in the const equiv map since the
981 store_expr call can easily blow the size estimate. */
982 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
983 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
985 else if (GET_CODE (loc) == REG)
986 process_reg_param (map, loc, copy);
987 else if (GET_CODE (loc) == CONCAT)
989 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
990 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
991 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
992 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
994 process_reg_param (map, locreal, copyreal);
995 process_reg_param (map, locimag, copyimag);
997 else
998 abort ();
1001 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
1002 specially. This function can be called recursively, so we need to
1003 save the previous value. */
1004 inlining_previous = inlining;
1005 inlining = inl_f;
1007 /* Now do the parameters that will be placed in memory. */
1009 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1010 formal; formal = TREE_CHAIN (formal), i++)
1012 loc = RTVEC_ELT (arg_vector, i);
1014 if (GET_CODE (loc) == MEM
1015 /* Exclude case handled above. */
1016 && ! (GET_CODE (XEXP (loc, 0)) == REG
1017 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1019 rtx note = emit_line_note (DECL_SOURCE_FILE (formal),
1020 DECL_SOURCE_LINE (formal));
1021 if (note)
1022 RTX_INTEGRATED_P (note) = 1;
1024 /* Compute the address in the area we reserved and store the
1025 value there. */
1026 temp = copy_rtx_and_substitute (loc, map, 1);
1027 subst_constants (&temp, NULL_RTX, map, 1);
1028 apply_change_group ();
1029 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1030 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1031 store_expr (arg_trees[i], temp, 0);
1035 /* Deal with the places that the function puts its result.
1036 We are driven by what is placed into DECL_RESULT.
1038 Initially, we assume that we don't need any special handling for
1039 REG_FUNCTION_VALUE_P.  */
1041 map->inline_target = 0;
1042 loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
1043 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
1045 if (TYPE_MODE (type) == VOIDmode)
1046 /* There is no return value to worry about. */
1048 else if (GET_CODE (loc) == MEM)
1050 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
1052 temp = copy_rtx_and_substitute (loc, map, 1);
1053 subst_constants (&temp, NULL_RTX, map, 1);
1054 apply_change_group ();
1055 target = temp;
1057 else
1059 if (! structure_value_addr
1060 || ! aggregate_value_p (DECL_RESULT (fndecl)))
1061 abort ();
1063 /* Pass the function the address in which to return a structure
1064 value. Note that a constructor can cause someone to call us
1065 with STRUCTURE_VALUE_ADDR, but the initialization takes place
1066 via the first parameter, rather than the struct return address.
1068 We have two cases: If the address is a simple register
1069 indirect, use the mapping mechanism to point that register to
1070 our structure return address. Otherwise, store the structure
1071 return address into the place that it will be referenced from.  */
1073 if (GET_CODE (XEXP (loc, 0)) == REG)
1075 temp = force_operand (structure_value_addr, NULL_RTX);
1076 temp = force_reg (Pmode, temp);
1077 /* A virtual register might be invalid in an insn, because
1078 it can cause trouble in reload. Since we don't have access
1079 to the expanders at map translation time, make sure we have
1080 a proper register now.
1081 If a virtual register is actually valid, cse or combine
1082 can put it into the mapped insns. */
1083 if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
1084 && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
1085 temp = copy_to_mode_reg (Pmode, temp);
1086 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1088 if (CONSTANT_P (structure_value_addr)
1089 || GET_CODE (structure_value_addr) == ADDRESSOF
1090 || (GET_CODE (structure_value_addr) == PLUS
1091 && (XEXP (structure_value_addr, 0)
1092 == virtual_stack_vars_rtx)
1093 && (GET_CODE (XEXP (structure_value_addr, 1))
1094 == CONST_INT)))
1096 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
1097 CONST_AGE_PARM);
1100 else
1102 temp = copy_rtx_and_substitute (loc, map, 1);
1103 subst_constants (&temp, NULL_RTX, map, 0);
1104 apply_change_group ();
1105 emit_move_insn (temp, structure_value_addr);
1109 else if (ignore)
1110 /* We will ignore the result value, so don't look at its structure.
1111 Note that preparations for an aggregate return value
1112 do need to be made (above) even if it will be ignored. */
1114 else if (GET_CODE (loc) == REG)
1116 /* The function returns an object in a register and we use the return
1117 value. Set up our target for remapping. */
1119 /* Machine mode the function was declared to return.  */
1120 enum machine_mode departing_mode = TYPE_MODE (type);
1121 /* (Possibly wider) machine mode it actually computes
1122 (for the sake of callers that fail to declare it right).
1123 We have to use the mode of the result's RTL, rather than
1124 its type, since expand_function_start may have promoted it. */
1125 enum machine_mode arriving_mode
1126 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1127 rtx reg_to_map;
1129 /* Don't use MEMs as direct targets because on some machines
1130 substituting a MEM for a REG makes invalid insns.
1131 Let the combiner substitute the MEM if that is valid. */
1132 if (target == 0 || GET_CODE (target) != REG
1133 || GET_MODE (target) != departing_mode)
1135 /* Don't make BLKmode registers. If this looks like
1136 a BLKmode object being returned in a register, get
1137 the mode from that, otherwise abort. */
1138 if (departing_mode == BLKmode)
1140 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1142 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1143 arriving_mode = departing_mode;
1145 else
1146 abort ();
1149 target = gen_reg_rtx (departing_mode);
1152 /* If function's value was promoted before return,
1153 avoid machine mode mismatch when we substitute INLINE_TARGET.
1154 But TARGET is what we will return to the caller. */
1155 if (arriving_mode != departing_mode)
1157 /* Avoid creating a paradoxical subreg wider than
1158 BITS_PER_WORD, since that is illegal. */
1159 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1161 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1162 GET_MODE_BITSIZE (arriving_mode)))
1163 /* Maybe could be handled by using convert_move () ? */
1164 abort ();
1165 reg_to_map = gen_reg_rtx (arriving_mode);
1166 target = gen_lowpart (departing_mode, reg_to_map);
1168 else
1169 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1171 else
1172 reg_to_map = target;
1174 /* Usually, the result value is the machine's return register.
1175 Sometimes it may be a pseudo. Handle both cases. */
1176 if (REG_FUNCTION_VALUE_P (loc))
1177 map->inline_target = reg_to_map;
1178 else
1179 map->reg_map[REGNO (loc)] = reg_to_map;
1181 else if (GET_CODE (loc) == CONCAT)
1183 enum machine_mode departing_mode = TYPE_MODE (type);
1184 enum machine_mode arriving_mode
1185 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1187 if (departing_mode != arriving_mode)
1188 abort ();
1189 if (GET_CODE (XEXP (loc, 0)) != REG
1190 || GET_CODE (XEXP (loc, 1)) != REG)
1191 abort ();
1193 /* Don't use MEMs as direct targets because on some machines
1194 substituting a MEM for a REG makes invalid insns.
1195 Let the combiner substitute the MEM if that is valid. */
1196 if (target == 0 || GET_CODE (target) != REG
1197 || GET_MODE (target) != departing_mode)
1198 target = gen_reg_rtx (departing_mode);
1200 if (GET_CODE (target) != CONCAT)
1201 abort ();
1203 map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
1204 map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
1206 else
1207 abort ();
1209 /* Remap the exception handler data pointer from one to the other. */
1210 temp = get_exception_pointer (inl_f);
1211 if (temp)
1212 map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);
1214 /* Initialize label_map. get_label_from_map will actually make
1215 the labels. */
1216 memset ((char *) &map->label_map[min_labelno], 0,
1217 (max_labelno - min_labelno) * sizeof (rtx));
1219 /* Make copies of the decls of the symbols in the inline function, so that
1220 the copies of the variables get declared in the current function. Set
1221 up things so that lookup_static_chain knows that to interpret registers
1222 in SAVE_EXPRs for TYPE_SIZEs as local. */
1223 inline_function_decl = fndecl;
1224 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1225 block = integrate_decl_tree (inl_f->original_decl_initial, map);
1226 BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
1227 inline_function_decl = 0;
1229 /* Make a fresh binding contour that we can easily remove. Do this after
1230 expanding our arguments so cleanups are properly scoped. */
1231 expand_start_bindings_and_block (0, block);
1233 /* Sort the block-map so that it will be easy to find remapped
1234 blocks later. */
1235 qsort (&VARRAY_TREE (map->block_map, 0),
1236 map->block_map->elements_used,
1237 sizeof (tree),
1238 compare_blocks);
1240 /* Perform postincrements before actually calling the function. */
1241 emit_queue ();
1243 /* Clean up stack so that variables might have smaller offsets. */
1244 do_pending_stack_adjust ();
1246 /* Save a copy of the location of const_equiv_varray for
1247 mark_stores, called via note_stores. */
1248 global_const_equiv_varray = map->const_equiv_varray;
1250 /* If the called function does an alloca, save and restore the
1251 stack pointer around the call. This saves stack space, but
1252 also is required if this inline is being done between two
1253 pushes. */
1254 if (inl_f->calls_alloca)
1255 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1257 /* Map pseudos used for initial hard reg values. */
1258 setup_initial_hard_reg_value_integration (inl_f, map);
1260 /* Now copy the insns one by one. */
1261 copy_insn_list (insns, map, static_chain_value);
1263 /* Duplicate the EH regions. This will create an offset from the
1264 region numbers in the function we're inlining to the region
1265 numbers in the calling function. This must wait until after
1266 copy_insn_list, as we need the insn map to be complete. */
1267 eh_region_offset = duplicate_eh_regions (inl_f, map);
1269 /* Now copy the REG_NOTES for those insns. */
1270 copy_insn_notes (insns, map, eh_region_offset);
1272 /* If the insn sequence required one, emit the return label. */
1273 if (map->local_return_label)
1274 emit_label (map->local_return_label);
1276 /* Restore the stack pointer if we saved it above. */
1277 if (inl_f->calls_alloca)
1278 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1280 if (! cfun->x_whole_function_mode_p)
1281 /* In statement-at-a-time mode, we just tell the front-end to add
1282 this block to the list of blocks at this binding level. We
1283 can't do it the way it's done for function-at-a-time mode because the
1284 superblocks have not been created yet.  */
1285 (*lang_hooks.decls.insert_block) (block);
1286 else
1288 BLOCK_CHAIN (block)
1289 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1290 BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
1293 /* End the scope containing the copied formal parameter variables
1294 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1295 here so that expand_end_bindings will not check for unused
1296 variables. That's already been checked for when the inlined
1297 function was defined. */
1298 expand_end_bindings (NULL_TREE, 1, 1);
1300 /* Must mark the line number note after inlined functions as a repeat, so
1301 that the test coverage code can avoid counting the call twice. This
1302 just tells the code to ignore the immediately following line note, since
1303 there already exists a copy of this note before the expanded inline call.
1304 This line number note is still needed for debugging though, so we can't
1305 delete it. */
1306 if (flag_test_coverage)
1307 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
1309 emit_line_note (input_filename, input_line);
1311 /* If the function returns a BLKmode object in a register, copy it
1312 out of the temp register into a BLKmode memory object. */
1313 if (target
1314 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1315 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
1316 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
1318 if (structure_value_addr)
1320 target = gen_rtx_MEM (TYPE_MODE (type),
1321 memory_address (TYPE_MODE (type),
1322 structure_value_addr));
1323 set_mem_attributes (target, type, 1);
1326 /* Make sure we free the things we explicitly allocated with xmalloc. */
1327 if (real_label_map)
1328 free (real_label_map);
1329 VARRAY_FREE (map->const_equiv_varray);
1330 free (map->reg_map);
1331 free (map->insn_map);
1332 free (map);
1333 free (arg_vals);
1334 free (arg_trees);
1336 inlining = inlining_previous;
1338 return target;
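/* Editorial caller-side sketch, mirroring how expand_call in calls.c
   consumes the three-way result described above:

     rtx temp = expand_inline_function (fndecl, parms, target, ignore,
                                        type, structure_value_addr);

   If TEMP is (rtx) (size_t) -1 the substitution failed and the caller
   falls back to emitting a real call; if TEMP is 0 the body was
   inlined and produces no value; otherwise TEMP is where the value
   is stored.  */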
1341 /* Make copies of each insn in the given list using the mapping
1342 computed in expand_inline_function. This function may call itself for
1343 insns containing sequences.
1345 Copying is done in two passes, first the insns and then their REG_NOTES.
1347 If static_chain_value is nonzero, it represents the context-pointer
1348 register for the function. */
1350 static void
1351 copy_insn_list (insns, map, static_chain_value)
1352 rtx insns;
1353 struct inline_remap *map;
1354 rtx static_chain_value;
1356 int i;
1357 rtx insn;
1358 rtx temp;
1359 #ifdef HAVE_cc0
1360 rtx cc0_insn = 0;
1361 #endif
1362 rtx static_chain_mem = 0;
1364 /* Copy the insns one by one. Do this in two passes, first the insns and
1365 then their REG_NOTES. */
1367 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1369 for (insn = insns; insn; insn = NEXT_INSN (insn))
1371 rtx copy, pattern, set;
1373 map->orig_asm_operands_vector = 0;
1375 switch (GET_CODE (insn))
1377 case INSN:
1378 pattern = PATTERN (insn);
1379 set = single_set (insn);
1380 copy = 0;
1381 if (GET_CODE (pattern) == USE
1382 && GET_CODE (XEXP (pattern, 0)) == REG
1383 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1384 /* The (USE (REG n)) at return from the function should
1385 be ignored since we are changing (REG n) into
1386 inline_target. */
1387 break;
1389 /* Ignore setting a function value that we don't want to use. */
1390 if (map->inline_target == 0
1391 && set != 0
1392 && GET_CODE (SET_DEST (set)) == REG
1393 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1395 if (volatile_refs_p (SET_SRC (set)))
1397 rtx new_set;
1399 /* If we must not delete the source,
1400 load it into a new temporary. */
1401 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1403 new_set = single_set (copy);
1404 if (new_set == 0)
1405 abort ();
1407 SET_DEST (new_set)
1408 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1410 /* If the source and destination are the same and it
1411 has a note on it, keep the insn. */
1412 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1413 && REG_NOTES (insn) != 0)
1414 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1415 else
1416 break;
1419 /* Similarly if an ignored return value is clobbered. */
1420 else if (map->inline_target == 0
1421 && GET_CODE (pattern) == CLOBBER
1422 && GET_CODE (XEXP (pattern, 0)) == REG
1423 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1424 break;
1426 /* Look for the address of the static chain slot. The
1427 rtx_equal_p comparisons against the
1428 static_chain_incoming_rtx below may fail if the static
1429 chain is in memory and the address specified is not
1430 "legitimate". This happens on Xtensa where the static
1431 chain is at a negative offset from argp and where only
1432 positive offsets are legitimate. When the RTL is
1433 generated, the address is "legitimized" by copying it
1434 into a register, causing the rtx_equal_p comparisons to
1435 fail. This workaround looks for code that sets a
1436 register to the address of the static chain. Subsequent
1437 memory references via that register can then be
1438 identified as static chain references. We assume that
1439 the register is only assigned once, and that the static
1440 chain address is only live in one register at a time. */
1442 else if (static_chain_value != 0
1443 && set != 0
1444 && GET_CODE (static_chain_incoming_rtx) == MEM
1445 && GET_CODE (SET_DEST (set)) == REG
1446 && rtx_equal_p (SET_SRC (set),
1447 XEXP (static_chain_incoming_rtx, 0)))
1449 static_chain_mem =
1450 gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
1451 SET_DEST (set));
1453 /* Emit the instruction in case it is used for something
1454    other than setting the static chain; if it's not used,
1455    it can always be removed as dead code.  */
1456 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1459 /* If this is setting the static chain rtx, omit it. */
1460 else if (static_chain_value != 0
1461 && set != 0
1462 && (rtx_equal_p (SET_DEST (set),
1463 static_chain_incoming_rtx)
1464 || (static_chain_mem
1465 && rtx_equal_p (SET_DEST (set), static_chain_mem))))
1466 break;
1468 /* If this is setting the static chain pseudo, set it from
1469 the value we want to give it instead. */
1470 else if (static_chain_value != 0
1471 && set != 0
1472 && (rtx_equal_p (SET_SRC (set),
1473 static_chain_incoming_rtx)
1474 || (static_chain_mem
1475 && rtx_equal_p (SET_SRC (set), static_chain_mem))))
1477 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
1479 copy = emit_move_insn (newdest, static_chain_value);
1480 if (GET_CODE (static_chain_incoming_rtx) != MEM)
1481 static_chain_value = 0;
1484 /* If this is setting the virtual stack vars register, this must
1485 be the code at the handler for a builtin longjmp. The value
1486 saved in the setjmp buffer will be the address of the frame
1487 we've made for this inlined instance within our frame. But we
1488 know the offset of that value so we can use it to reconstruct
1489 our virtual stack vars register from that value. If we are
1490 copying it from the stack pointer, leave it unchanged. */
1491 else if (set != 0
1492 && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
1494 HOST_WIDE_INT offset;
1495 temp = map->reg_map[REGNO (SET_DEST (set))];
1496 temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1497 REGNO (temp)).rtx;
1499 if (rtx_equal_p (temp, virtual_stack_vars_rtx))
1500 offset = 0;
1501 else if (GET_CODE (temp) == PLUS
1502 && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
1503 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1504 offset = INTVAL (XEXP (temp, 1));
1505 else
1506 abort ();
1508 if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
1509 temp = SET_SRC (set);
1510 else
1511 temp = force_operand (plus_constant (SET_SRC (set),
1512 - offset),
1513 NULL_RTX);
1515 copy = emit_move_insn (virtual_stack_vars_rtx, temp);
1518 else
1519 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1520 /* REG_NOTES will be copied later. */
1522 #ifdef HAVE_cc0
1523 /* If this insn is setting CC0, it may need to look at
1524 the insn that uses CC0 to see what type of insn it is.
1525 In that case, the call to recog via validate_change will
1526 fail. So don't substitute constants here. Instead,
1527 do it when we emit the following insn.
1529 For example, see the pyr.md file. That machine has signed and
1530 unsigned compares. The compare patterns must check the
1531 following branch insn to see what kind of compare to
1532 emit.
1534 If the previous insn set CC0, substitute constants on it as
1535 well. */
1536 if (sets_cc0_p (PATTERN (copy)) != 0)
1537 cc0_insn = copy;
1538 else
1540 if (cc0_insn)
1541 try_constants (cc0_insn, map);
1542 cc0_insn = 0;
1543 try_constants (copy, map);
1545 #else
1546 try_constants (copy, map);
1547 #endif
1548 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
1549 break;
1551 case JUMP_INSN:
1552 if (map->integrating && returnjump_p (insn))
1554 if (map->local_return_label == 0)
1555 map->local_return_label = gen_label_rtx ();
1556 pattern = gen_jump (map->local_return_label);
1558 else
1559 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1561 copy = emit_jump_insn (pattern);
1563 #ifdef HAVE_cc0
1564 if (cc0_insn)
1565 try_constants (cc0_insn, map);
1566 cc0_insn = 0;
1567 #endif
1568 try_constants (copy, map);
1569 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
1571 /* If this used to be a conditional jump insn whose branch
1572    direction is now known, we must do something special.  */
1573 if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
1575 #ifdef HAVE_cc0
1576 /* If the previous insn set cc0 for us, delete it. */
1577 if (only_sets_cc0_p (PREV_INSN (copy)))
1578 delete_related_insns (PREV_INSN (copy));
1579 #endif
1581 /* If this is now a no-op, delete it. */
1582 if (map->last_pc_value == pc_rtx)
1584 delete_related_insns (copy);
1585 copy = 0;
1587 else
1588 /* Otherwise, this is an unconditional jump so we must put a
1589 BARRIER after it. We could do some dead code elimination
1590 here, but jump.c will do it just as well. */
1591 emit_barrier ();
1593 break;
1595 case CALL_INSN:
1596 /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1597 three attached sequences: normal call, sibling call and tail
1598 recursion. */
1599 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1601 rtx sequence[3];
1602 rtx tail_label;
1604 for (i = 0; i < 3; i++)
1606 rtx seq;
1608 sequence[i] = NULL_RTX;
1609 seq = XEXP (PATTERN (insn), i);
1610 if (seq)
1612 start_sequence ();
1613 copy_insn_list (seq, map, static_chain_value);
1614 sequence[i] = get_insns ();
1615 end_sequence ();
1619 /* Find the new tail recursion label.
1620 It will already be substituted into sequence[2]. */
1621 tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
1622 map, 0);
1624 copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
1625 sequence[0],
1626 sequence[1],
1627 sequence[2],
1628 tail_label));
1629 break;
1632 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1633 copy = emit_call_insn (pattern);
1635 SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
1636 CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
1637 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
1639 /* Because the USAGE information potentially contains objects other
1640 than hard registers, we need to copy it. */
1642 CALL_INSN_FUNCTION_USAGE (copy)
1643 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1644 map, 0);
1646 #ifdef HAVE_cc0
1647 if (cc0_insn)
1648 try_constants (cc0_insn, map);
1649 cc0_insn = 0;
1650 #endif
1651 try_constants (copy, map);
1653 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1654 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1655 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
1656 break;
1658 case CODE_LABEL:
1659 copy = emit_label (get_label_from_map (map,
1660 CODE_LABEL_NUMBER (insn)));
1661 LABEL_NAME (copy) = LABEL_NAME (insn);
1662 map->const_age++;
1663 break;
1665 case BARRIER:
1666 copy = emit_barrier ();
1667 break;
1669 case NOTE:
1670 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1672 copy = emit_label (get_label_from_map (map,
1673 CODE_LABEL_NUMBER (insn)));
1674 LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1675 map->const_age++;
1676 break;
1679 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1680 discarded because it is important to have only one of
1681 each in the current function.
1683 NOTE_INSN_DELETED notes aren't useful. */
1685 if (NOTE_LINE_NUMBER (insn) > 0)
1686 copy = emit_line_note (NOTE_SOURCE_FILE (insn),
1687 NOTE_LINE_NUMBER (insn));
1688 else if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1689 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1690 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1692 copy = emit_note (NOTE_LINE_NUMBER (insn));
1693 NOTE_DATA (copy) = NOTE_DATA (insn);
1694 if ((NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1695 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1696 && NOTE_BLOCK (insn))
1698 tree *mapped_block_p;
1700 mapped_block_p
1701 = (tree *) bsearch (NOTE_BLOCK (insn),
1702 &VARRAY_TREE (map->block_map, 0),
1703 map->block_map->elements_used,
1704 sizeof (tree),
1705 find_block);
1707 if (!mapped_block_p)
1708 abort ();
1709 else
1710 NOTE_BLOCK (copy) = *mapped_block_p;
1712 else if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1713 NOTE_EXPECTED_VALUE (copy)
1714 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1715 map, 0);
1717 else
1718 copy = 0;
1719 break;
1721 default:
1722 abort ();
1725 if (copy)
1726 RTX_INTEGRATED_P (copy) = 1;
1728 map->insn_map[INSN_UID (insn)] = copy;
1732 /* Copy the REG_NOTES. Increment const_age, so that only constants
1733 from parameters can be substituted in. These are the only ones
1734 that are valid across the entire function. */
1736 static void
1737 copy_insn_notes (insns, map, eh_region_offset)
1738 rtx insns;
1739 struct inline_remap *map;
1740 int eh_region_offset;
1742 rtx insn, new_insn;
1744 map->const_age++;
1745 for (insn = insns; insn; insn = NEXT_INSN (insn))
1747 if (! INSN_P (insn))
1748 continue;
1750 new_insn = map->insn_map[INSN_UID (insn)];
1751 if (! new_insn)
1752 continue;
1754 if (REG_NOTES (insn))
1756 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1758 /* We must also do subst_constants, in case one of our parameters
1759 has const type and constant value. */
1760 subst_constants (&note, NULL_RTX, map, 0);
1761 apply_change_group ();
1762 REG_NOTES (new_insn) = note;
1764 /* Delete any REG_LABEL notes from the chain. Remap any
1765 REG_EH_REGION notes. */
1766 for (; note; note = next)
1768 next = XEXP (note, 1);
1769 if (REG_NOTE_KIND (note) == REG_LABEL)
1770 remove_note (new_insn, note);
1771 else if (REG_NOTE_KIND (note) == REG_EH_REGION
1772 && INTVAL (XEXP (note, 0)) > 0)
1773 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1774 + eh_region_offset);
1778 if (GET_CODE (insn) == CALL_INSN
1779 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1781 int i;
1782 for (i = 0; i < 3; i++)
1783 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1786 if (GET_CODE (insn) == JUMP_INSN
1787 && GET_CODE (PATTERN (insn)) == RESX)
1788 XINT (PATTERN (new_insn), 0) += eh_region_offset;
1792 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1793 push all of those decls and give each one the corresponding home. */
1795 static void
1796 integrate_parm_decls (args, map, arg_vector)
1797 tree args;
1798 struct inline_remap *map;
1799 rtvec arg_vector;
1801 tree tail;
1802 int i;
1804 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1806 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1807 current_function_decl);
1808 rtx new_decl_rtl
1809 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1811 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1812 here, but that's going to require some more work. */
1813 /* DECL_INCOMING_RTL (decl) = ?; */
1814 /* Fully instantiate the address with the equivalent form so that the
1815 debugging information contains the actual register, instead of the
1816 virtual register. Do this by not passing an insn to
1817 subst_constants. */
1818 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1819 apply_change_group ();
1820 SET_DECL_RTL (decl, new_decl_rtl);
1824 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1825 current function a tree of contexts isomorphic to the one that is given.
1827 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1828 registers used in the DECL_RTL field should be remapped. If it is zero,
1829 no mapping is necessary. */
1831 static tree
1832 integrate_decl_tree (let, map)
1833 tree let;
1834 struct inline_remap *map;
1836 tree t;
1837 tree new_block;
1838 tree *next;
1840 new_block = make_node (BLOCK);
1841 VARRAY_PUSH_TREE (map->block_map, new_block);
1842 next = &BLOCK_VARS (new_block);
1844 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1846 tree d;
1848 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1850 if (DECL_RTL_SET_P (t))
1852 rtx r;
1854 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1856 /* Fully instantiate the address with the equivalent form so that the
1857 debugging information contains the actual register, instead of the
1858 virtual register. Do this by not passing an insn to
1859 subst_constants. */
1860 r = DECL_RTL (d);
1861 subst_constants (&r, NULL_RTX, map, 1);
1862 SET_DECL_RTL (d, r);
1864 apply_change_group ();
1867 /* Add this declaration to the list of variables in the new
1868 block. */
1869 *next = d;
1870 next = &TREE_CHAIN (d);
1873 next = &BLOCK_SUBBLOCKS (new_block);
1874 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1876 *next = integrate_decl_tree (t, map);
1877 BLOCK_SUPERCONTEXT (*next) = new_block;
1878 next = &BLOCK_CHAIN (*next);
1881 TREE_USED (new_block) = TREE_USED (let);
1882 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1884 return new_block;
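/* For illustration: inlining a body whose scopes nest as

     { int a; { int b; } }

   makes integrate_decl_tree build a parallel tree in the current
   function, roughly

     BLOCK [a'] --BLOCK_SUBBLOCKS--> BLOCK [b']

   where each new BLOCK's BLOCK_ABSTRACT_ORIGIN points back at the
   BLOCK it was copied from, so debug output can relate the inlined
   copy to the original scope.  */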
1887 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1888 except for those few rtx codes that are sharable.
1890 We always return an rtx that is similar to that incoming rtx, with the
1891 exception of possibly changing a REG to a SUBREG or vice versa. No
1892 rtl is ever emitted.
1894 If FOR_LHS is nonzero, it means we are processing something that will
1895 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1896 inlining since we need to be conservative in how it is set for
1897 such cases.
1899 Handle constants that need to be placed in the constant pool by
1900 calling `force_const_mem'. */
1902 rtx
1903 copy_rtx_and_substitute (orig, map, for_lhs)
1904 rtx orig;
1905 struct inline_remap *map;
1906 int for_lhs;
1908 rtx copy, temp;
1909 int i, j;
1910 RTX_CODE code;
1911 enum machine_mode mode;
1912 const char *format_ptr;
1913 int regno;
1915 if (orig == 0)
1916 return 0;
1918 code = GET_CODE (orig);
1919 mode = GET_MODE (orig);
1921 switch (code)
1923 case REG:
1924 /* If the stack pointer register shows up, it must be part of
1925 stack-adjustments (*not* because we eliminated the frame pointer!).
1926 Small hard registers are returned as-is. Pseudo-registers
1927 go through their `reg_map'. */
1928 regno = REGNO (orig);
1929 if (regno <= LAST_VIRTUAL_REGISTER
1930 || (map->integrating
1931 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1933 /* Some hard registers are also mapped,
1934 but others are not translated. */
1935 if (map->reg_map[regno] != 0)
1936 return map->reg_map[regno];
1938 /* If this is the virtual frame pointer, make space in current
1939 function's stack frame for the stack frame of the inline function.
1941 Copy the address of this area into a pseudo. Map
1942 virtual_stack_vars_rtx to this pseudo and set up a constant
1943 equivalence for it to be the address. This will substitute the
1944 address into insns where it can be substituted and use the new
1945 pseudo where it can't. */
1946 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1948 rtx loc, seq;
1949 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1950 #ifdef FRAME_GROWS_DOWNWARD
1951 int alignment
1952 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1953 / BITS_PER_UNIT);
1955 /* In this case, virtual_stack_vars_rtx points to one byte
1956 higher than the top of the frame area. So make sure we
1957 allocate a big enough chunk to keep the frame pointer
1958 aligned like a real one. */
1959 if (alignment)
1960 size = CEIL_ROUND (size, alignment);
1961 #endif
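/* A worked example of the rounding above: with a frame size of 20
   bytes and stack_alignment_needed of 64 bits, alignment is 8 and
   CEIL_ROUND (20, 8) = (20 + 7) & ~7 = 24, so the block allocated
   below is large enough to keep the substitute frame pointer 8-byte
   aligned like a real one.  */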
1962 start_sequence ();
1963 loc = assign_stack_temp (BLKmode, size, 1);
1964 loc = XEXP (loc, 0);
1965 #ifdef FRAME_GROWS_DOWNWARD
1966 /* In this case, virtual_stack_vars_rtx points to one byte
1967 higher than the top of the frame area. So compute the offset
1968 to one byte higher than our substitute frame. */
1969 loc = plus_constant (loc, size);
1970 #endif
1971 map->reg_map[regno] = temp
1972 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1974 #ifdef STACK_BOUNDARY
1975 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1976 #endif
1978 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1980 seq = get_insns ();
1981 end_sequence ();
1982 emit_insn_after (seq, map->insns_at_start);
1983 return temp;
1985 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1986 || (map->integrating
1987 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1988 == orig)))
1990 /* Do the same for a block to contain any arguments referenced
1991 in memory. */
1992 rtx loc, seq;
1993 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1995 start_sequence ();
1996 loc = assign_stack_temp (BLKmode, size, 1);
1997 loc = XEXP (loc, 0);
1998 /* When arguments grow downward, the virtual incoming
1999 args pointer points to the top of the argument block,
2000 so the remapped location better do the same. */
2001 #ifdef ARGS_GROW_DOWNWARD
2002 loc = plus_constant (loc, size);
2003 #endif
2004 map->reg_map[regno] = temp
2005 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2007 #ifdef STACK_BOUNDARY
2008 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
2009 #endif
2011 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
2013 seq = get_insns ();
2014 end_sequence ();
2015 emit_insn_after (seq, map->insns_at_start);
2016 return temp;
2018 else if (REG_FUNCTION_VALUE_P (orig))
2020 /* This is a reference to the function return value. If
2021 the function doesn't have a return value, error. If the
2022 mode doesn't agree, and it isn't BLKmode, make a SUBREG. */
2023 if (map->inline_target == 0)
2025 if (rtx_equal_function_value_matters)
2026 /* This is an ignored return value. We must not
2027 leave it in with REG_FUNCTION_VALUE_P set, since
2028 that would confuse subsequent inlining of the
2029 current function into a later function. */
2030 return gen_rtx_REG (GET_MODE (orig), regno);
2031 else
2032 /* Must be unrolling loops or replicating code if we
2033 reach here, so return the register unchanged. */
2034 return orig;
2036 else if (GET_MODE (map->inline_target) != BLKmode
2037 && mode != GET_MODE (map->inline_target))
2038 return gen_lowpart (mode, map->inline_target);
2039 else
2040 return map->inline_target;
2042 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2043 /* If leaf_renumber_regs_insn() might remap this register to
2044 some other number, make sure we don't share it with the
2045 inlined function, otherwise delayed optimization of the
2046 inlined function may change it in place, breaking our
2047 reference to it. We may still share it within the
2048 function, so create an entry for this register in the
2049 reg_map. */
2050 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2051 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2053 if (!map->leaf_reg_map[regno][mode])
2054 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2055 return map->leaf_reg_map[regno][mode];
2057 #endif
2058 else
2059 return orig;
2061 abort ();
2063 if (map->reg_map[regno] == NULL)
2065 map->reg_map[regno] = gen_reg_rtx (mode);
2066 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2067 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2068 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2069 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2071 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2072 mark_reg_pointer (map->reg_map[regno],
2073 map->regno_pointer_align[regno]);
2075 return map->reg_map[regno];
2077 case SUBREG:
2078 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2079 return simplify_gen_subreg (GET_MODE (orig), copy,
2080 GET_MODE (SUBREG_REG (orig)),
2081 SUBREG_BYTE (orig));
2083 case ADDRESSOF:
2084 copy = gen_rtx_ADDRESSOF (mode,
2085 copy_rtx_and_substitute (XEXP (orig, 0),
2086 map, for_lhs),
2087 0, ADDRESSOF_DECL (orig));
2088 regno = ADDRESSOF_REGNO (orig);
2089 if (map->reg_map[regno])
2090 regno = REGNO (map->reg_map[regno]);
2091 else if (regno > LAST_VIRTUAL_REGISTER)
2093 temp = XEXP (orig, 0);
2094 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2095 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2096 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2097 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2098 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2100 /* Objects may initially be represented as registers, but may later
2101 be turned into a MEM if their address is taken by
2102 put_var_into_stack. Therefore, the register table may have
2103 entries which are MEMs.
2105 We briefly tried to clear such entries, but that ended up
2106 cascading into many changes due to the optimizers not being
2107 prepared for empty entries in the register table. So we've
2108 decided to allow the MEMs in the register table for now. */
2109 if (REG_P (map->x_regno_reg_rtx[regno])
2110 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2111 mark_reg_pointer (map->reg_map[regno],
2112 map->regno_pointer_align[regno]);
2113 regno = REGNO (map->reg_map[regno]);
2115 ADDRESSOF_REGNO (copy) = regno;
2116 return copy;
2118 case USE:
2119 case CLOBBER:
2120 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2121 to (use foo) if the original insn didn't have a subreg.
2122 Removing the subreg distorts the VAX movstrhi pattern
2123 by changing the mode of an operand. */
2124 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2125 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2126 copy = SUBREG_REG (copy);
2127 return gen_rtx_fmt_e (code, VOIDmode, copy);
2129 /* We need to handle "deleted" labels that appear in the DECL_RTL
2130 of a LABEL_DECL. */
2131 case NOTE:
2132 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2133 break;
2135 /* ... FALLTHRU ... */
2136 case CODE_LABEL:
2137 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2138 = LABEL_PRESERVE_P (orig);
2139 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2141 case LABEL_REF:
2142 copy
2143 = gen_rtx_LABEL_REF
2144 (mode,
2145 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2146 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2148 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2150 /* The fact that this label was previously nonlocal does not mean
2151 it still is, so we must check if it is within the range of
2152 this function's labels. */
2153 LABEL_REF_NONLOCAL_P (copy)
2154 = (LABEL_REF_NONLOCAL_P (orig)
2155 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2156 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2158 /* If we have made a nonlocal label local, it means that this
2159 inlined call will be referring to our nonlocal goto handler.
2160 So make sure we create one for this block; we normally would
2161 not since this is not otherwise considered a "call". */
2162 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2163 function_call_count++;
2165 return copy;
2167 case PC:
2168 case CC0:
2169 case CONST_INT:
2170 case CONST_VECTOR:
2171 return orig;
2173 case SYMBOL_REF:
2174 /* Symbols which represent the address of a label stored in the constant
2175 pool must be modified to point to a constant pool entry for the
2176 remapped label. Otherwise, symbols are returned unchanged. */
2177 if (CONSTANT_POOL_ADDRESS_P (orig))
2179 struct function *f = inlining ? inlining : cfun;
2180 rtx constant = get_pool_constant_for_function (f, orig);
2181 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2182 if (inlining)
2184 rtx temp = force_const_mem (const_mode,
2185 copy_rtx_and_substitute (constant,
2186 map, 0));
2188 #if 0
2189 /* Legitimizing the address here is incorrect.
2191 Since we had a SYMBOL_REF before, we can assume it is valid
2192 to have one in this position in the insn.
2194 Also, change_address may create new registers. These
2195 registers will not have valid reg_map entries. This can
2196 cause try_constants() to fail because it assumes that all
2197 registers in the rtx have valid reg_map entries, and it may
2198 end up replacing one of these new registers with junk. */
2200 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2201 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2202 #endif
2204 temp = XEXP (temp, 0);
2206 #ifdef POINTERS_EXTEND_UNSIGNED
2207 if (GET_MODE (temp) != GET_MODE (orig))
2208 temp = convert_memory_address (GET_MODE (orig), temp);
2209 #endif
2210 return temp;
2212 else if (GET_CODE (constant) == LABEL_REF)
2213 return XEXP (force_const_mem
2214 (GET_MODE (orig),
2215 copy_rtx_and_substitute (constant, map, for_lhs)),
2216 0);
2218 else if (TREE_CONSTANT_POOL_ADDRESS_P (orig) && inlining)
2219 notice_rtl_inlining_of_deferred_constant ();
2221 return orig;
2223 case CONST_DOUBLE:
2224 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2225 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2226 duplicate of a CONST_DOUBLE we have already seen. */
2227 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2229 REAL_VALUE_TYPE d;
2231 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2232 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2234 else
2235 return immed_double_const (CONST_DOUBLE_LOW (orig),
2236 CONST_DOUBLE_HIGH (orig), VOIDmode);
2238 case CONST:
2239 /* Make new constant pool entry for a constant
2240 that was in the pool of the inline function. */
2241 if (RTX_INTEGRATED_P (orig))
2242 abort ();
2243 break;
2245 case ASM_OPERANDS:
2246 /* If a single asm insn contains multiple output operands then
2247 it contains multiple ASM_OPERANDS rtx's that share the input
2248 and constraint vecs. We must make sure that the copied insn
2249 continues to share it. */
2250 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2252 copy = rtx_alloc (ASM_OPERANDS);
2253 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2254 PUT_MODE (copy, GET_MODE (orig));
2255 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2256 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2257 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2258 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2259 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2260 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2261 = map->copy_asm_constraints_vector;
2262 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2263 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2264 return copy;
2266 break;
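/* For example, an asm statement with two outputs, such as

     asm ("..." : "=r" (x), "=r" (y) : "r" (z));

   expands to a PARALLEL of two SETs whose ASM_OPERANDS share one
   input vector and one constraint vector; the map fields recorded
   near the bottom of this function preserve that sharing when the
   second ASM_OPERANDS is copied.  */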
2268 case CALL:
2269 /* This is given special treatment because the first
2270 operand of a CALL is a (MEM ...) which may get
2271 forced into a register for cse. This is undesirable
2272 if function-address cse isn't wanted or if we won't do cse. */
2273 #ifndef NO_FUNCTION_CSE
2274 if (! (optimize && ! flag_no_function_cse))
2275 #endif
2277 rtx copy
2278 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2279 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2280 map, 0));
2282 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2284 return
2285 gen_rtx_CALL (GET_MODE (orig), copy,
2286 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2288 break;
2290 #if 0
2291 /* Must be ifdefed out for loop unrolling to work. */
2292 case RETURN:
2293 abort ();
2294 #endif
2296 case SET:
2297 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2298 Adjust the setting by the offset of the area we made.
2299 If the nonlocal goto is into the current function,
2300 this will result in unnecessarily bad code, but should work. */
2301 if (SET_DEST (orig) == virtual_stack_vars_rtx
2302 || SET_DEST (orig) == virtual_incoming_args_rtx)
2304 /* In case a translation hasn't occurred already, make one now. */
2305 rtx equiv_reg;
2306 rtx equiv_loc;
2307 HOST_WIDE_INT loc_offset;
2309 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2310 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2311 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2312 REGNO (equiv_reg)).rtx;
2313 loc_offset
2314 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2316 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2317 force_operand
2318 (plus_constant
2319 (copy_rtx_and_substitute (SET_SRC (orig),
2320 map, 0),
2321 - loc_offset),
2322 NULL_RTX));
2324 else
2325 return gen_rtx_SET (VOIDmode,
2326 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2327 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2328 break;
2330 case MEM:
2331 if (inlining
2332 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2333 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2335 enum machine_mode const_mode
2336 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2337 rtx constant
2338 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2340 constant = copy_rtx_and_substitute (constant, map, 0);
2342 /* If this was an address of a constant pool entry that itself
2343 had to be placed in the constant pool, it might not be a
2344 valid address. So the recursive call might have turned it
2345 into a register. In that case, it isn't a constant any
2346 more, so return it. This has the potential of changing a
2347 MEM into a REG, but we'll assume that it is safe. */
2348 if (! CONSTANT_P (constant))
2349 return constant;
2351 return validize_mem (force_const_mem (const_mode, constant));
2354 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2355 map, 0));
2356 MEM_COPY_ATTRIBUTES (copy, orig);
2358 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2359 since this may be an indirect reference to a parameter and the
2360 actual may not be readonly. */
2361 if (inlining && !for_lhs)
2362 RTX_UNCHANGING_P (copy) = 0;
2364 /* If inlining, squish aliasing data that references the subroutine's
2365 parameter list, since that's no longer applicable. */
2366 if (inlining && MEM_EXPR (copy)
2367 && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2368 && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2369 set_mem_expr (copy, NULL_TREE);
2371 return copy;
2373 default:
2374 break;
2377 copy = rtx_alloc (code);
2378 PUT_MODE (copy, mode);
2379 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2380 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2381 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2383 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2385 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2387 switch (*format_ptr++)
2389 case '0':
2390 /* Copy this through the wide int field; that's safest. */
2391 X0WINT (copy, i) = X0WINT (orig, i);
2392 break;
2394 case 'e':
2395 XEXP (copy, i)
2396 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2397 break;
2399 case 'u':
2400 /* Change any references to old-insns to point to the
2401 corresponding copied insns. */
2402 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2403 break;
2405 case 'E':
2406 XVEC (copy, i) = XVEC (orig, i);
2407 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2409 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2410 for (j = 0; j < XVECLEN (copy, i); j++)
2411 XVECEXP (copy, i, j)
2412 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2413 map, for_lhs);
2415 break;
2417 case 'w':
2418 XWINT (copy, i) = XWINT (orig, i);
2419 break;
2421 case 'i':
2422 XINT (copy, i) = XINT (orig, i);
2423 break;
2425 case 's':
2426 XSTR (copy, i) = XSTR (orig, i);
2427 break;
2429 case 't':
2430 XTREE (copy, i) = XTREE (orig, i);
2431 break;
2433 default:
2434 abort ();
2438 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2440 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2441 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2442 map->copy_asm_constraints_vector
2443 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2446 return copy;
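/* An illustrative sketch of the common case: given the inlined
   function's rtx

     (plus:SI (reg:SI 60) (const_int 4))

   where pseudo 60 has no reg_map entry yet, the REG case above
   allocates a fresh pseudo (copying REG_USERVAR_P and friends), and
   the result is

     (plus:SI (reg:SI <new pseudo>) (const_int 4))

   with no new insns emitted.  */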
2449 /* Substitute known constant values into INSN, if that is valid. */
2451 void
2452 try_constants (insn, map)
2453 rtx insn;
2454 struct inline_remap *map;
2456 int i;
2458 map->num_sets = 0;
2460 /* First try just updating addresses, then other things. This is
2461 important when we have something like the store of a constant
2462 into memory and we can update the memory address but the machine
2463 does not support a constant source. */
2464 subst_constants (&PATTERN (insn), insn, map, 1);
2465 apply_change_group ();
2466 subst_constants (&PATTERN (insn), insn, map, 0);
2467 apply_change_group ();
2469 /* Enforce consistency between the addresses in the regular insn flow
2470 and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
2471 if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
2473 subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
2474 apply_change_group ();
2477 /* Show we don't know the value of anything stored or clobbered. */
2478 note_stores (PATTERN (insn), mark_stores, NULL);
2479 map->last_pc_value = 0;
2480 #ifdef HAVE_cc0
2481 map->last_cc0_value = 0;
2482 #endif
2484 /* Set up any constant equivalences made in this insn. */
2485 for (i = 0; i < map->num_sets; i++)
2487 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2489 int regno = REGNO (map->equiv_sets[i].dest);
2491 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2492 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2493 /* The following clause is a hack to make the case work where GNU C++
2494 reassigns a variable to make cse work right. */
2495 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2496 regno).rtx,
2497 map->equiv_sets[i].equiv))
2498 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2499 map->equiv_sets[i].equiv, map->const_age);
2501 else if (map->equiv_sets[i].dest == pc_rtx)
2502 map->last_pc_value = map->equiv_sets[i].equiv;
2503 #ifdef HAVE_cc0
2504 else if (map->equiv_sets[i].dest == cc0_rtx)
2505 map->last_cc0_value = map->equiv_sets[i].equiv;
2506 #endif
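/* A sketch of why try_constants makes two passes: for an insn like

     (set (mem:SI (plus:SI (reg:SI 60) (const_int 4))) (reg:SI 61))

   where both pseudos have known constant equivalences, the first
   (memonly) pass commits the address substitution in a group of its
   own. If the source substitution were attempted in the same group
   on a machine that rejects a constant source, apply_change_group
   would cancel the entire group, the address update included.  */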
2510 /* Substitute known constants for pseudo regs in the contents of LOC,
2511 which are part of INSN.
2512 If INSN is zero, the substitution should always be done (this is used to
2513 update DECL_RTL).
2514 These changes are taken out by try_constants if the result is not valid.
2516 Note that we are more concerned with determining when the result of a SET
2517 is a constant, for further propagation, than actually inserting constants
2518 into insns; cse will do the latter task better.
2520 This function is also used to adjust addresses of items previously addressed
2521 via the virtual stack variable or virtual incoming arguments registers.
2523 If MEMONLY is nonzero, only make changes inside a MEM. */
2525 static void
2526 subst_constants (loc, insn, map, memonly)
2527 rtx *loc;
2528 rtx insn;
2529 struct inline_remap *map;
2530 int memonly;
2532 rtx x = *loc;
2533 int i, j;
2534 enum rtx_code code;
2535 const char *format_ptr;
2536 int num_changes = num_validated_changes ();
2537 rtx new = 0;
2538 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2540 code = GET_CODE (x);
2542 switch (code)
2544 case PC:
2545 case CONST_INT:
2546 case CONST_DOUBLE:
2547 case CONST_VECTOR:
2548 case SYMBOL_REF:
2549 case CONST:
2550 case LABEL_REF:
2551 case ADDRESS:
2552 return;
2554 #ifdef HAVE_cc0
2555 case CC0:
2556 if (! memonly)
2557 validate_change (insn, loc, map->last_cc0_value, 1);
2558 return;
2559 #endif
2561 case USE:
2562 case CLOBBER:
2563 /* The only thing we can do with a USE or CLOBBER is possibly do
2564 some substitutions in a MEM within it. */
2565 if (GET_CODE (XEXP (x, 0)) == MEM)
2566 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2567 return;
2569 case REG:
2570 /* Substitute for parms and known constants. Don't replace
2571 hard regs used as user variables with constants. */
2572 if (! memonly)
2574 int regno = REGNO (x);
2575 struct const_equiv_data *p;
2577 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2578 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2579 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2580 p->rtx != 0)
2581 && p->age >= map->const_age)
2582 validate_change (insn, loc, p->rtx, 1);
2584 return;
2586 case SUBREG:
2587 /* SUBREG applied to something other than a reg
2588 should be treated as ordinary, since that must
2589 be a special hack and we don't know how to treat it specially.
2590 Consider for example mulsidi3 in m68k.md.
2591 Ordinary SUBREG of a REG needs this special treatment. */
2592 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2594 rtx inner = SUBREG_REG (x);
2595 rtx new = 0;
2597 /* We can't call subst_constants on &SUBREG_REG (x) because any
2598 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2599 see what is inside, try to form the new SUBREG and see if that is
2600 valid. We handle two cases: extracting a full word in an
2601 integral mode and extracting the low part. */
2602 subst_constants (&inner, NULL_RTX, map, 0);
2603 new = simplify_gen_subreg (GET_MODE (x), inner,
2604 GET_MODE (SUBREG_REG (x)),
2605 SUBREG_BYTE (x));
2607 if (new)
2608 validate_change (insn, loc, new, 1);
2609 else
2610 cancel_changes (num_changes);
2612 return;
2614 break;
2616 case MEM:
2617 subst_constants (&XEXP (x, 0), insn, map, 0);
2619 /* If a memory address got spoiled, change it back. */
2620 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2621 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2622 cancel_changes (num_changes);
2623 return;
2625 case SET:
2627 /* Substitute constants in our source, and in any arguments to a
2628 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2629 itself. */
2630 rtx *dest_loc = &SET_DEST (x);
2631 rtx dest = *dest_loc;
2632 rtx src, tem;
2633 enum machine_mode compare_mode = VOIDmode;
2635 /* If SET_SRC is a COMPARE which subst_constants would turn into
2636 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2637 is to be done. */
2638 if (GET_CODE (SET_SRC (x)) == COMPARE)
2640 src = SET_SRC (x);
2641 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2642 || CC0_P (dest))
2644 compare_mode = GET_MODE (XEXP (src, 0));
2645 if (compare_mode == VOIDmode)
2646 compare_mode = GET_MODE (XEXP (src, 1));
2650 subst_constants (&SET_SRC (x), insn, map, memonly);
2651 src = SET_SRC (x);
2653 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2654 || GET_CODE (*dest_loc) == SUBREG
2655 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2657 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2659 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2660 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2662 dest_loc = &XEXP (*dest_loc, 0);
2665 /* Do substitute in the address of a destination in memory. */
2666 if (GET_CODE (*dest_loc) == MEM)
2667 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2669 /* Check for the case where DEST is a SUBREG, both it and the underlying
2670 register are no larger than one word, and the SUBREG has the wider mode.
2671 In that case, we are really setting the underlying register to the
2672 source converted to the mode of DEST. So indicate that. */
2673 if (GET_CODE (dest) == SUBREG
2674 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2675 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2676 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2677 <= GET_MODE_SIZE (GET_MODE (dest)))
2678 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2679 src)))
2680 src = tem, dest = SUBREG_REG (dest);
2682 /* If storing a recognizable value, save it for later recording. */
2683 if ((map->num_sets < MAX_RECOG_OPERANDS)
2684 && (CONSTANT_P (src)
2685 || (GET_CODE (src) == REG
2686 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2687 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2688 || (GET_CODE (src) == PLUS
2689 && GET_CODE (XEXP (src, 0)) == REG
2690 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2691 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2692 && CONSTANT_P (XEXP (src, 1)))
2693 || GET_CODE (src) == COMPARE
2694 || CC0_P (dest)
2695 || (dest == pc_rtx
2696 && (src == pc_rtx || GET_CODE (src) == RETURN
2697 || GET_CODE (src) == LABEL_REF))))
2699 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2700 it will cause us to save the COMPARE with any constants
2701 substituted, which is what we want for later. */
2702 rtx src_copy = copy_rtx (src);
2703 map->equiv_sets[map->num_sets].equiv = src_copy;
2704 map->equiv_sets[map->num_sets++].dest = dest;
2705 if (compare_mode != VOIDmode
2706 && GET_CODE (src) == COMPARE
2707 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2708 || CC0_P (dest))
2709 && GET_MODE (XEXP (src, 0)) == VOIDmode
2710 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2712 map->compare_src = src_copy;
2713 map->compare_mode = compare_mode;
2717 return;
2719 default:
2720 break;
2723 format_ptr = GET_RTX_FORMAT (code);
2725 /* If the first operand is an expression, save its mode for later. */
2726 if (*format_ptr == 'e')
2727 op0_mode = GET_MODE (XEXP (x, 0));
2729 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2731 switch (*format_ptr++)
2733 case '0':
2734 break;
2736 case 'e':
2737 if (XEXP (x, i))
2738 subst_constants (&XEXP (x, i), insn, map, memonly);
2739 break;
2741 case 'u':
2742 case 'i':
2743 case 's':
2744 case 'w':
2745 case 'n':
2746 case 't':
2747 case 'B':
2748 break;
2750 case 'E':
2751 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2752 for (j = 0; j < XVECLEN (x, i); j++)
2753 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2755 break;
2757 default:
2758 abort ();
2762 /* If this is a commutative operation, move a constant to the second
2763 operand unless the second operand is already a CONST_INT. */
2764 if (! memonly
2765 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2766 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2768 rtx tem = XEXP (x, 0);
2769 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2770 validate_change (insn, &XEXP (x, 1), tem, 1);
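/* For instance, if substitution has turned
     (plus:SI (reg:SI 60) (reg:SI 61))
   into
     (plus:SI (const_int 4) (reg:SI 61)),
   the swap above canonicalizes it to
     (plus:SI (reg:SI 61) (const_int 4)).  */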
2773 /* Simplify the expression in case we put in some constants. */
2774 if (! memonly)
2775 switch (GET_RTX_CLASS (code))
2777 case '1':
2778 if (op0_mode == MAX_MACHINE_MODE)
2779 abort ();
2780 new = simplify_unary_operation (code, GET_MODE (x),
2781 XEXP (x, 0), op0_mode);
2782 break;
2784 case '<':
2786 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2788 if (op_mode == VOIDmode)
2789 op_mode = GET_MODE (XEXP (x, 1));
2790 new = simplify_relational_operation (code, op_mode,
2791 XEXP (x, 0), XEXP (x, 1));
2792 #ifdef FLOAT_STORE_FLAG_VALUE
2793 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2795 enum machine_mode mode = GET_MODE (x);
2796 if (new == const0_rtx)
2797 new = CONST0_RTX (mode);
2798 else
2800 REAL_VALUE_TYPE val;
2802 /* Avoid automatic aggregate initialization. */
2803 val = FLOAT_STORE_FLAG_VALUE (mode);
2804 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2807 #endif
2808 break;
2811 case '2':
2812 case 'c':
2813 new = simplify_binary_operation (code, GET_MODE (x),
2814 XEXP (x, 0), XEXP (x, 1));
2815 break;
2817 case 'b':
2818 case '3':
2819 if (op0_mode == MAX_MACHINE_MODE)
2820 abort ();
2822 if (code == IF_THEN_ELSE)
2824 rtx op0 = XEXP (x, 0);
2826 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2827 && GET_MODE (op0) == VOIDmode
2828 && ! side_effects_p (op0)
2829 && XEXP (op0, 0) == map->compare_src
2830 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2832 /* We have a compare of two VOIDmode constants for which
2833 we recorded the comparison mode. */
2834 rtx temp =
2835 simplify_relational_operation (GET_CODE (op0),
2836 map->compare_mode,
2837 XEXP (op0, 0),
2838 XEXP (op0, 1));
2840 if (temp == const0_rtx)
2841 new = XEXP (x, 2);
2842 else if (temp == const1_rtx)
2843 new = XEXP (x, 1);
2846 if (!new)
2847 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2848 XEXP (x, 0), XEXP (x, 1),
2849 XEXP (x, 2));
2850 break;
2853 if (new)
2854 validate_change (insn, loc, new, 1);
2857 /* Show that registers modified no longer contain known constants. We are
2858 called from note_stores with parts of the new insn. */
2860 static void
2861 mark_stores (dest, x, data)
2862 rtx dest;
2863 rtx x ATTRIBUTE_UNUSED;
2864 void *data ATTRIBUTE_UNUSED;
2866 int regno = -1;
2867 enum machine_mode mode = VOIDmode;
2869 /* DEST is always the innermost thing set, except in the case of
2870 SUBREGs of hard registers. */
2872 if (GET_CODE (dest) == REG)
2873 regno = REGNO (dest), mode = GET_MODE (dest);
2874 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2876 regno = REGNO (SUBREG_REG (dest));
2877 if (regno < FIRST_PSEUDO_REGISTER)
2878 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2879 GET_MODE (SUBREG_REG (dest)),
2880 SUBREG_BYTE (dest),
2881 GET_MODE (dest));
2882 mode = GET_MODE (SUBREG_REG (dest));
2885 if (regno >= 0)
2887 unsigned int uregno = regno;
2888 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2889 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2890 unsigned int i;
2892 /* Ignore virtual stack var or virtual arg register since those
2893 are handled separately. */
2894 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2895 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2896 for (i = uregno; i <= last_reg; i++)
2897 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2898 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
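/* For illustration, assuming a target where HARD_REGNO_NREGS (2,
   DImode) is 2: a store to

     (subreg:SI (reg:DI 2) 0)

   invalidates the recorded equivalences for hard regs 2 and 3, since
   the whole multi-register DImode value may have changed.  */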
2902 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2903 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2904 that it points to the node itself, thus indicating that the node is its
2905 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2906 the given node is NULL, recursively descend the decl/block tree which
2907 it is the root of, and for each other ..._DECL or BLOCK node contained
2908 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2909 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2910 values to point to themselves. */
2912 static void
2913 set_block_origin_self (stmt)
2914 tree stmt;
2916 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2918 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2921 tree local_decl;
2923 for (local_decl = BLOCK_VARS (stmt);
2924 local_decl != NULL_TREE;
2925 local_decl = TREE_CHAIN (local_decl))
2926 set_decl_origin_self (local_decl); /* Potential recursion. */
2930 tree subblock;
2932 for (subblock = BLOCK_SUBBLOCKS (stmt);
2933 subblock != NULL_TREE;
2934 subblock = BLOCK_CHAIN (subblock))
2935 set_block_origin_self (subblock); /* Recurse. */
2940 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2941 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2942 node so that it points to the node itself, thus indicating that the
2943 node represents its own (abstract) origin. Additionally, if the
2944 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2945 the decl/block tree of which the given node is the root, and for
2946 each other ..._DECL or BLOCK node contained therein whose
2947 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2948 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2949 point to themselves. */
2951 void
2952 set_decl_origin_self (decl)
2953 tree decl;
2955 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2957 DECL_ABSTRACT_ORIGIN (decl) = decl;
2958 if (TREE_CODE (decl) == FUNCTION_DECL)
2960 tree arg;
2962 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2963 DECL_ABSTRACT_ORIGIN (arg) = arg;
2964 if (DECL_INITIAL (decl) != NULL_TREE
2965 && DECL_INITIAL (decl) != error_mark_node)
2966 set_block_origin_self (DECL_INITIAL (decl));
2971 /* Given a pointer to some BLOCK node, and a boolean value to set the
2972 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2973 the given block, and for all local decls and all local sub-blocks
2974 (recursively) which are contained therein. */
2976 static void
2977 set_block_abstract_flags (stmt, setting)
2978 tree stmt;
2979 int setting;
2981 tree local_decl;
2982 tree subblock;
2984 BLOCK_ABSTRACT (stmt) = setting;
2986 for (local_decl = BLOCK_VARS (stmt);
2987 local_decl != NULL_TREE;
2988 local_decl = TREE_CHAIN (local_decl))
2989 set_decl_abstract_flags (local_decl, setting);
2991 for (subblock = BLOCK_SUBBLOCKS (stmt);
2992 subblock != NULL_TREE;
2993 subblock = BLOCK_CHAIN (subblock))
2994 set_block_abstract_flags (subblock, setting);
2997 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2998 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2999 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3000 set the abstract flags for all of the parameters, local vars, local
3001 blocks and sub-blocks (recursively) to the same setting. */
3003 void
3004 set_decl_abstract_flags (decl, setting)
3005 tree decl;
3006 int setting;
3008 DECL_ABSTRACT (decl) = setting;
3009 if (TREE_CODE (decl) == FUNCTION_DECL)
3011 tree arg;
3013 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3014 DECL_ABSTRACT (arg) = setting;
3015 if (DECL_INITIAL (decl) != NULL_TREE
3016 && DECL_INITIAL (decl) != error_mark_node)
3017 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3021 /* Output the assembly language code for the function FNDECL
3022 from its DECL_SAVED_INSNS. Used for inline functions that are output
3023 at the end of compilation instead of where they appeared in the source. */
3025 static GTY(()) struct function *old_cfun;
3027 void
3028 output_inline_function (fndecl)
3029 tree fndecl;
3031 enum debug_info_type old_write_symbols = write_symbols;
3032 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
3033 struct function *f = DECL_SAVED_INSNS (fndecl);
3035 old_cfun = cfun;
3036 cfun = f;
3037 current_function_decl = fndecl;
3039 set_new_last_label_num (f->inl_max_label_num);
3041 /* We're not deferring this any longer. */
3042 DECL_DEFER_OUTPUT (fndecl) = 0;
3044 /* If requested, suppress debugging information. */
3045 if (f->no_debugging_symbols)
3047 write_symbols = NO_DEBUG;
3048 debug_hooks = &do_nothing_debug_hooks;
3051 /* Make sure warnings emitted by the optimizers (e.g. control reaches
3052 end of non-void function) are not wildly incorrect. */
3053 input_location = DECL_SOURCE_LOCATION (fndecl);
3055 /* Compile this function all the way down to assembly code. As a
3056 side effect this destroys the saved RTL representation, but
3057 that's okay, because we don't need to inline this anymore. */
3058 rest_of_compilation (fndecl);
3059 DECL_INLINE (fndecl) = 0;
3061 cfun = old_cfun;
3062 current_function_decl = old_cfun ? old_cfun->decl : 0;
3063 write_symbols = old_write_symbols;
3064 debug_hooks = old_debug_hooks;
3068 /* Functions to keep track of the values hard regs had at the start of
3069 the function. */
3071 rtx
3072 get_hard_reg_initial_reg (fun, reg)
3073 struct function *fun;
3074 rtx reg;
3076 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3077 int i;
3079 if (ivs == 0)
3080 return NULL_RTX;
3082 for (i = 0; i < ivs->num_entries; i++)
3083 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3084 return ivs->entries[i].hard_reg;
3086 return NULL_RTX;
3089 rtx
3090 has_func_hard_reg_initial_val (fun, reg)
3091 struct function *fun;
3092 rtx reg;
3094 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3095 int i;
3097 if (ivs == 0)
3098 return NULL_RTX;
3100 for (i = 0; i < ivs->num_entries; i++)
3101 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3102 return ivs->entries[i].pseudo;
3104 return NULL_RTX;
3107 rtx
3108 get_func_hard_reg_initial_val (fun, reg)
3109 struct function *fun;
3110 rtx reg;
3112 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3113 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3115 if (rv)
3116 return rv;
3118 if (ivs == 0)
3120 fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
3121 ivs = fun->hard_reg_initial_vals;
3122 ivs->num_entries = 0;
3123 ivs->max_entries = 5;
3124 ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
3127 if (ivs->num_entries >= ivs->max_entries)
3129 ivs->max_entries += 5;
3130 ivs->entries =
3131 (initial_value_pair *) ggc_realloc (ivs->entries,
3132 ivs->max_entries
3133 * sizeof (initial_value_pair));
3136 ivs->entries[ivs->num_entries].hard_reg = reg;
3137 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3139 return ivs->entries[ivs->num_entries++].pseudo;
3142 rtx
3143 get_hard_reg_initial_val (mode, regno)
3144 enum machine_mode mode;
3145 int regno;
3147 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3150 rtx
3151 has_hard_reg_initial_val (mode, regno)
3152 enum machine_mode mode;
3153 int regno;
3155 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
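/* A hedged usage sketch: a backend that wants the value the link
   register had on function entry (LR_REGNUM here is a hypothetical
   register number) can write, typically in its RETURN_ADDR_RTX
   expansion,

     rtx ra = get_hard_reg_initial_val (Pmode, LR_REGNUM);

   The pseudo that comes back is loaded from the hard register by
   emit_initial_value_sets, below.  */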
3158 static void
3159 setup_initial_hard_reg_value_integration (inl_f, remap)
3160 struct function *inl_f;
3161 struct inline_remap *remap;
3163 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3164 int i;
3166 if (ivs == 0)
3167 return;
3169 for (i = 0; i < ivs->num_entries; i ++)
3170 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3171 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3175 void
3176 emit_initial_value_sets ()
3178 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3179 int i;
3180 rtx seq;
3182 if (ivs == 0)
3183 return;
3185 start_sequence ();
3186 for (i = 0; i < ivs->num_entries; i++)
3187 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3188 seq = get_insns ();
3189 end_sequence ();
3191 emit_insn_after (seq, get_insns ());
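/* For example, if pseudo 70 was created above for hard reg 14 in
   SImode, the sequence emitted here contains

     (set (reg:SI 70) (reg:SI 14))

   placed right after the function's first insn, so every later use
   of the pseudo sees the value the hard register had on entry.  */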
3194 /* If the backend knows where to allocate pseudos for hard
3195 register initial values, register these allocations now. */
3196 void
3197 allocate_initial_values (reg_equiv_memory_loc)
3198 rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3200 #ifdef ALLOCATE_INITIAL_VALUE
3201 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3202 int i;
3204 if (ivs == 0)
3205 return;
3207 for (i = 0; i < ivs->num_entries; i++)
3209 int regno = REGNO (ivs->entries[i].pseudo);
3210 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3212 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3213 ; /* Do nothing. */
3214 else if (GET_CODE (x) == MEM)
3215 reg_equiv_memory_loc[regno] = x;
3216 else if (GET_CODE (x) == REG)
3218 reg_renumber[regno] = REGNO (x);
3219 /* Poke the regno right into regno_reg_rtx
3220 so that even fixed regs are accepted. */
3221 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3223 else abort ();
3225 #endif
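/* A hedged sketch of what a target's ALLOCATE_INITIAL_VALUE might look
   like (LR_REGNUM and the 4-byte stack offset are hypothetical):

     #define ALLOCATE_INITIAL_VALUE(HARD_REG)                          \
       (REGNO (HARD_REG) == LR_REGNUM                                  \
        ? gen_rtx_MEM (Pmode, plus_constant (stack_pointer_rtx, 4))    \
        : NULL_RTX)

   Returning a MEM gives the pseudo that slot as its memory
   equivalence, returning a hard REG pins the pseudo to that register,
   and NULL_RTX leaves the choice to the register allocator.  */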
3228 #include "gt-integrate.h"