/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"
/* Round VALUE up to the next highest integer that meets the
   alignment ALIGN.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
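
/* For instance (an illustrative note added in editing):
   CEIL_ROUND (13, 8) evaluates to (13 + 7) & ~7 == 16, while a value
   that already meets the alignment, such as 16, is returned unchanged.
   ALIGN must be a power of two for the mask arithmetic to work.  */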

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
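
/* A worked example (added in editing): for a two-argument function, the
   optimize_size threshold is 1 + (3 * 2) / 2 = 4 insns, matching the
   "1 insn for the call + 1.5 insns per argument" estimate above, while
   the default threshold is 8 * (8 + 2) = 80 insns.  */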

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));

static rtvec initialize_for_inline PARAMS ((tree));
static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
					  rtvec));
static tree integrate_decl_tree PARAMS ((tree,
					 struct inline_remap *));
static void subst_constants PARAMS ((rtx *, rtx,
				     struct inline_remap *, int));
static void set_block_origin_self PARAMS ((tree));
static void set_block_abstract_flags PARAMS ((tree, int));
static void process_reg_param PARAMS ((struct inline_remap *, rtx,
				       rtx));
void set_decl_abstract_flags PARAMS ((tree, int));
static void mark_stores PARAMS ((rtx, rtx, void *));
static void save_parm_insns PARAMS ((rtx, rtx));
static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
				    rtx));
static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
				     int));
static int compare_blocks PARAMS ((const PTR, const PTR));
static int find_block PARAMS ((const PTR, const PTR));

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g., loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the struct function for the inlined function.  */
static struct function *inlining = 0;

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
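
/* A usage note (added in editing): callers index this map by label
   number, e.g.

       copy = emit_label (get_label_from_map (map,
					      CODE_LABEL_NUMBER (insn)));

   as copy_insn_list below does, so only labels that are actually
   referenced ever get allocated.  */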

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (fndecl)
     tree fndecl;
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return (*targetm.function_attribute_inlinable_p) (fndecl);
	}
    }

  return true;
}

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     tree fndecl;
{
  rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     MAX_INLINE_INSNS_RTL (--param max-inline-insn-rtl=<n>).  For
     regular functions use the limit given by INTEGRATE_THRESHOLD.
     Note that the RTL inliner is not used by the languages that use
     the tree inliner (C, C++).  */

  int max_insns = (DECL_INLINE (fndecl))
		   ? (MAX_INLINE_INSNS_RTL
		      + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		   : INTEGRATE_THRESHOLD (fndecl);

  int ninsns = 0;
  tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if (last && TREE_VALUE (last) != void_type_node)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
	       && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behavior.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
	return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (!function_attribute_inlinable_p (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
	  && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
	  && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
	  && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
	{
	  invisiref = 1;
	  type = DECL_ARG_TYPE (decl);
	}
      else
	type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
	{
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
	  TREE_READONLY (copy) = TREE_READONLY (decl);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
	}
      else
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  TREE_READONLY (copy) = 1;
	  TREE_THIS_VOLATILE (copy) = 0;
	}
    }
  else
    {
      copy = copy_node (decl);
      (*lang_hooks.dup_lang_specific_decl) (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */
  if (! flag_no_inline)
    parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  if (! flag_no_inline)
    argvec = initialize_for_inline (fndecl);
  else
    argvec = NULL;

  /* Delete basic block notes created by early run of find_basic_blocks.
     The notes would be later used by find_basic_blocks to reuse the memory
     for basic_block structures on already freed obstack.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
      delete_related_insns (insn);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  if (! flag_no_inline)
    {
      /* Get the insn which signals the end of parameter setup code.  */
      first_nonparm_insn = get_first_nonparm_insn ();

      /* Now just scan the chain of insns to see what happens to our
	 PARM_DECLs.  If a PARM_DECL is used but never modified, we
	 can substitute its rtl directly when expanding inline (and
	 perform constant folding when its incoming value is
	 constant).  Otherwise, we have to copy its value into a new
	 register and track the new register's life.  */
      in_nonparm_insns = 0;
      save_parm_insns (insn, first_nonparm_insn);

      cfun->inl_max_label_num = max_label_num ();
      cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
      cfun->original_arg_vector = argvec;
    }
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  if (! flag_no_inline)
    free (parmdecl_map);
}

/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (INSN_P (insn))
	{
	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
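
/* Illustratively (a note added in editing), FIXED_BASE_PLUS_P matches
   addresses of the shape

       (plus (reg:SI virtual-stack-vars) (const_int 8))

   i.e. a constant offset from one of the virtual registers, which
   remains meaningful when substituted into the inlined body.  */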

/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}

/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  We cannot just subtract the
   two pointers, because the difference may overflow an int.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) b1;
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}
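
/* An explanatory note (added in editing): these two comparators work as
   a pair.  compare_blocks orders map->block_map by BLOCK_ABSTRACT_ORIGIN
   via qsort in expand_inline_function; find_block is then handed to
   bsearch in copy_insn_list to look up the remapped BLOCK for a given
   original, which is why its first argument is the raw search key rather
   than a pointer into the array.  */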

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
  rtvec arg_vector = inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (size_t) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (arg == error_mark_node
	  || mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (size_t) -1;
    }

  /* If there is a TARGET which is a readonly BLKmode MEM and DECL_RESULT
     is also a mem, we are going to lose the readonly on the stores, so don't
     inline.  */
  if (target != 0 && GET_CODE (target) == MEM && GET_MODE (target) == BLKmode
      && RTX_UNCHANGING_P (target) && DECL_RTL_SET_P (DECL_RESULT (fndecl))
      && GET_CODE (DECL_RTL (DECL_RESULT (fndecl))) == MEM)
    return (rtx) (size_t) -1;

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

	  store_expr (arg, stack_slot, 0);
	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    {
	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
	      enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

	      pmode = promote_mode (TREE_TYPE (formal), pmode,
				    &unsignedp, 0);

	      if (GET_MODE (loc) != pmode)
		abort ();

	      /* The modes of LOC and ARG can differ if LOC was a variable
		 that had its mode promoted via PROMOTED_MODE.  */
	      arg_vals[i] = convert_modes (pmode,
					   TYPE_MODE (TREE_TYPE (arg)),
					   expand_expr (arg, NULL_RTX, mode,
							EXPAND_SUM),
					   unsignedp);
	    }
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      /* If the formal type was const but the actual was not, we might
	 end up here with an rtx wrongly tagged unchanging in the caller's
	 context.  Fix that.  */
      if (arg_vals[i] != 0
	  && (GET_CODE (arg_vals[i]) == REG || GET_CODE (arg_vals[i]) == MEM)
	  && ! TREE_READONLY (TREE_VALUE (actual)))
	RTX_UNCHANGING_P (arg_vals[i]) = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;
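
  /* A worked example of the sizing above (added in editing, numbers
     hypothetical): inlining a two-argument callee that used 50 pseudos
     into a caller currently using 100 reserves roughly
     100 + 50 + 15 * 2 + 10 = 190 const_equiv entries.  */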

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  /* If the inlined function calls __builtin_constant_p, then we'll
     need to call purge_builtin_constant_p on this function.  */
  if (inl_f->calls_constant_p)
    current_function_calls_constant_p = 1;

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
	 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases:  If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      /* A virtual register might be invalid in an insn, because
		 it can cause trouble in reload.  Since we don't have access
		 to the expanders at map translation time, make sure we have
		 a proper register now.
		 If a virtual register is actually valid, cse or combine
		 can put it into the mapped insns.  */
	      if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
		  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
		temp = copy_to_mode_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else if (GET_CODE (loc) == CONCAT)
    {
      enum machine_mode departing_mode = TYPE_MODE (type);
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));

      if (departing_mode != arriving_mode)
	abort ();
      if (GET_CODE (XEXP (loc, 0)) != REG
	  || GET_CODE (XEXP (loc, 1)) != REG)
	abort ();

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	target = gen_reg_rtx (departing_mode);

      if (GET_CODE (target) != CONCAT)
	abort ();

      map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
      map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
    }
  else
    abort ();

  /* Remap the exception handler data pointer from one to the other.  */
  temp = get_exception_pointer (inl_f);
  if (temp)
    map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
	  (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Map pseudos used for initial hard reg values.  */
  setup_initial_hard_reg_value_integration (inl_f, map);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode because
       the superblocks have not been created yet.  */
    (*lang_hooks.decls.insert_block) (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is nonzero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtx static_chain_mem = 0;

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* Similarly if an ignored return value is clobbered.  */
	  else if (map->inline_target == 0
		   && GET_CODE (pattern) == CLOBBER
		   && GET_CODE (XEXP (pattern, 0)) == REG
		   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    break;

	  /* Look for the address of the static chain slot.  The
	     rtx_equal_p comparisons against the
	     static_chain_incoming_rtx below may fail if the static
	     chain is in memory and the address specified is not
	     "legitimate".  This happens on Xtensa where the static
	     chain is at a negative offset from argp and where only
	     positive offsets are legitimate.  When the RTL is
	     generated, the address is "legitimized" by copying it
	     into a register, causing the rtx_equal_p comparisons to
	     fail.  This workaround looks for code that sets a
	     register to the address of the static chain.  Subsequent
	     memory references via that register can then be
	     identified as static chain references.  We assume that
	     the register is only assigned once, and that the static
	     chain address is only live in one register at a time.  */
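
	  /* An illustrative sketch (added in editing, offsets
	     hypothetical): the workaround matches code shaped like

		 (set (reg R) (plus (reg argp) (const_int -12)))
		 ...
		 (set (reg X) (mem (reg R)))

	     and records (mem (reg R)) in static_chain_mem, so that later
	     references through R are recognized as static chain uses even
	     though they are not rtx_equal_p to static_chain_incoming_rtx.  */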
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (static_chain_incoming_rtx) == MEM
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_SRC (set),
				   XEXP (static_chain_incoming_rtx, 0)))
	    {
	      static_chain_mem =
		gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
			     SET_DEST (set));

	      /* Emit the instruction in case it is used for something
		 other than setting the static chain; if it's not used,
		 it can always be removed as dead code.  */
	      copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && (rtx_equal_p (SET_DEST (set),
				    static_chain_incoming_rtx)
		       || (static_chain_mem
			   && rtx_equal_p (SET_DEST (set), static_chain_mem))))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && (rtx_equal_p (SET_SRC (set),
				    static_chain_incoming_rtx)
		       || (static_chain_mem
			   && rtx_equal_p (SET_SRC (set), static_chain_mem))))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      if (GET_CODE (static_chain_incoming_rtx) != MEM)
		static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  INSN_SCOPE (copy) = INSN_SCOPE (insn);
	  break;

	case JUMP_INSN:
	  if (map->integrating && returnjump_p (insn))
	    {
	      if (map->local_return_label == 0)
		map->local_return_label = gen_label_rtx ();
	      pattern = gen_jump (map->local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);
	  INSN_SCOPE (copy) = INSN_SCOPE (insn);

	  /* If this used to be a conditional jump insn but whose branch
	     direction is now known, we must do something special.  */
	  if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (only_sets_cc0_p (PREV_INSN (copy)))
		delete_related_insns (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_related_insns (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  /* If this is a CALL_PLACEHOLDER insn then we need to copy the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      rtx sequence[3];
	      rtx tail_label;

	      for (i = 0; i < 3; i++)
		{
		  rtx seq;

		  sequence[i] = NULL_RTX;
		  seq = XEXP (PATTERN (insn), i);
		  if (seq)
		    {
		      start_sequence ();
		      copy_insn_list (seq, map, static_chain_value);
		      sequence[i] = get_insns ();
		      end_sequence ();
		    }
		}

	      /* Find the new tail recursion label.
		 It will already be substituted into sequence[2].  */
	      tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
						    map, 0);

	      copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
							       sequence[0],
							       sequence[1],
							       sequence[2],
							       tail_label));
	      break;
	    }

	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
	  CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
	  INSN_SCOPE (copy) = INSN_SCOPE (insn);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */

	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;
1659 case CODE_LABEL:
1660 copy = emit_label (get_label_from_map (map,
1661 CODE_LABEL_NUMBER (insn)));
1662 LABEL_NAME (copy) = LABEL_NAME (insn);
1663 map->const_age++;
1664 break;
1666 case BARRIER:
1667 copy = emit_barrier ();
1668 break;
1670 case NOTE:
1671 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1673 copy = emit_label (get_label_from_map (map,
1674 CODE_LABEL_NUMBER (insn)));
1675 LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1676 map->const_age++;
1677 break;
1680 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1681 discarded because it is important to have only one of
1682 each in the current function.
1684 NOTE_INSN_DELETED notes aren't useful. */
1686 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1687 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1688 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1690 copy = emit_note (NOTE_SOURCE_FILE (insn),
1691 NOTE_LINE_NUMBER (insn));
1692 if (copy
1693 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1694 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1695 && NOTE_BLOCK (insn))
1697 tree *mapped_block_p;
1699 mapped_block_p
1700 = (tree *) bsearch (NOTE_BLOCK (insn),
1701 &VARRAY_TREE (map->block_map, 0),
1702 map->block_map->elements_used,
1703 sizeof (tree),
1704 find_block);
1706 if (!mapped_block_p)
1707 abort ();
1708 else
1709 NOTE_BLOCK (copy) = *mapped_block_p;
1711 else if (copy
1712 && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1713 NOTE_EXPECTED_VALUE (copy)
1714 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1715 map, 0);
1717 else
1718 copy = 0;
1719 break;
1721 default:
1722 abort ();
1725 if (copy)
1726 RTX_INTEGRATED_P (copy) = 1;
1728 map->insn_map[INSN_UID (insn)] = copy;
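/* Illustration only, not built: how the insn_map filled in above is
   consumed.  An old insn's UID indexes its copy, so later passes can
   redirect references from the original insn to the copied one.  The
   helper name below is hypothetical.  */
#if 0
static rtx
remap_copied_insn (map, old_insn)
     struct inline_remap *map;
     rtx old_insn;
{
  /* Returns NULL if the insn was dropped during copying.  */
  return map->insn_map[INSN_UID (old_insn)];
}
#endif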
1732 /* Copy the REG_NOTES. Increment const_age, so that only constants
1733 from parameters can be substituted in. These are the only ones
1734 that are valid across the entire function. */
1736 static void
1737 copy_insn_notes (insns, map, eh_region_offset)
1738 rtx insns;
1739 struct inline_remap *map;
1740 int eh_region_offset;
1742 rtx insn, new_insn;
1744 map->const_age++;
1745 for (insn = insns; insn; insn = NEXT_INSN (insn))
1747 if (! INSN_P (insn))
1748 continue;
1750 new_insn = map->insn_map[INSN_UID (insn)];
1751 if (! new_insn)
1752 continue;
1754 if (REG_NOTES (insn))
1756 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1758 /* We must also do subst_constants, in case one of our parameters
1759 has const type and constant value. */
1760 subst_constants (&note, NULL_RTX, map, 0);
1761 apply_change_group ();
1762 REG_NOTES (new_insn) = note;
1764 /* Delete any REG_LABEL notes from the chain. Remap any
1765 REG_EH_REGION notes. */
1766 for (; note; note = next)
1768 next = XEXP (note, 1);
1769 if (REG_NOTE_KIND (note) == REG_LABEL)
1770 remove_note (new_insn, note);
1771 else if (REG_NOTE_KIND (note) == REG_EH_REGION
1772 && INTVAL (XEXP (note, 0)) > 0)
1773 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1774 + eh_region_offset);
1778 if (GET_CODE (insn) == CALL_INSN
1779 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1781 int i;
1782 for (i = 0; i < 3; i++)
1783 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1786 if (GET_CODE (insn) == JUMP_INSN
1787 && GET_CODE (PATTERN (insn)) == RESX)
1788 XINT (PATTERN (new_insn), 0) += eh_region_offset;
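/* Illustration only, not built: reading back a region number of the
   kind remapped above.  find_reg_note is the standard accessor; the
   wrapper name is hypothetical.  Returns -1 when INSN carries no
   REG_EH_REGION note.  */
#if 0
static int
insn_eh_region_number (insn)
     rtx insn;
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  return note ? INTVAL (XEXP (note, 0)) : -1;
}
#endif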
1792 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1793 push all of those decls and give each one the corresponding home. */
1795 static void
1796 integrate_parm_decls (args, map, arg_vector)
1797 tree args;
1798 struct inline_remap *map;
1799 rtvec arg_vector;
1801 tree tail;
1802 int i;
1804 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1806 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1807 current_function_decl);
1808 rtx new_decl_rtl
1809 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1811 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1812 here, but that's going to require some more work. */
1813 /* DECL_INCOMING_RTL (decl) = ?; */
1814 /* Fully instantiate the address with the equivalent form so that the
1815 debugging information contains the actual register, instead of the
1816 virtual register. Do this by not passing an insn to
1817 subst_constants. */
1818 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1819 apply_change_group ();
1820 SET_DECL_RTL (decl, new_decl_rtl);
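/* Illustration only, not built: the "no insn" idiom used above.
   Passing NULL_RTX instead of an insn makes subst_constants apply its
   changes unconditionally, which is what DECL_RTL updates need since
   there is no insn whose recognizability could be checked.  some_decl
   is a hypothetical placeholder.  */
#if 0
  rtx home = DECL_RTL (some_decl);
  subst_constants (&home, NULL_RTX, map, 1);
  apply_change_group ();
  SET_DECL_RTL (some_decl, home);
#endif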
1824 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1825 current function a tree of contexts isomorphic to the one that is given.
1827 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1828 registers used in the DECL_RTL field should be remapped. If it is zero,
1829 no mapping is necessary. */
1831 static tree
1832 integrate_decl_tree (let, map)
1833 tree let;
1834 struct inline_remap *map;
1836 tree t;
1837 tree new_block;
1838 tree *next;
1840 new_block = make_node (BLOCK);
1841 VARRAY_PUSH_TREE (map->block_map, new_block);
1842 next = &BLOCK_VARS (new_block);
1844 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1846 tree d;
1848 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1850 if (DECL_RTL_SET_P (t))
1852 rtx r;
1854 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1856 /* Fully instantiate the address with the equivalent form so that the
1857 debugging information contains the actual register, instead of the
1858 virtual register. Do this by not passing an insn to
1859 subst_constants. */
1860 r = DECL_RTL (d);
1861 subst_constants (&r, NULL_RTX, map, 1);
1862 SET_DECL_RTL (d, r);
1864 apply_change_group ();
1867 /* Add this declaration to the list of variables in the new
1868 block. */
1869 *next = d;
1870 next = &TREE_CHAIN (d);
1873 next = &BLOCK_SUBBLOCKS (new_block);
1874 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1876 *next = integrate_decl_tree (t, map);
1877 BLOCK_SUPERCONTEXT (*next) = new_block;
1878 next = &BLOCK_CHAIN (*next);
1881 TREE_USED (new_block) = TREE_USED (let);
1882 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1884 return new_block;
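/* Illustration only, not built: the recursion shape integrate_decl_tree
   follows.  A BLOCK tree is walked with BLOCK_SUBBLOCKS/BLOCK_CHAIN;
   here the same walk merely counts the nodes.  */
#if 0
static int
count_block_nodes (block)
     tree block;
{
  tree sub;
  int n = 1;
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    n += count_block_nodes (sub);
  return n;
}
#endif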
1887 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1888 except for those few rtx codes that are sharable.
1890 We always return an rtx that is similar to the incoming rtx, with the
1891 exception of possibly changing a REG to a SUBREG or vice versa. No
1892 rtl is ever emitted.
1894 If FOR_LHS is nonzero, it means we are processing something that will
1895 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1896 inlining since we need to be conservative in how it is set for
1897 such cases.
1899 Handle constants that need to be placed in the constant pool by
1900 calling `force_const_mem'. */
1902 rtx
1903 copy_rtx_and_substitute (orig, map, for_lhs)
1904 rtx orig;
1905 struct inline_remap *map;
1906 int for_lhs;
1908 rtx copy, temp;
1909 int i, j;
1910 RTX_CODE code;
1911 enum machine_mode mode;
1912 const char *format_ptr;
1913 int regno;
1915 if (orig == 0)
1916 return 0;
1918 code = GET_CODE (orig);
1919 mode = GET_MODE (orig);
1921 switch (code)
1923 case REG:
1924 /* If the stack pointer register shows up, it must be part of
1925 stack-adjustments (*not* because we eliminated the frame pointer!).
1926 Small hard registers are returned as-is. Pseudo-registers
1927 go through their `reg_map'. */
1928 regno = REGNO (orig);
1929 if (regno <= LAST_VIRTUAL_REGISTER
1930 || (map->integrating
1931 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1933 /* Some hard registers are also mapped,
1934 but others are not translated. */
1935 if (map->reg_map[regno] != 0)
1936 return map->reg_map[regno];
1938 /* If this is the virtual frame pointer, make space in current
1939 function's stack frame for the stack frame of the inline function.
1941 Copy the address of this area into a pseudo. Map
1942 virtual_stack_vars_rtx to this pseudo and set up a constant
1943 equivalence for it to be the address. This will substitute the
1944 address into insns where it can be substituted and use the new
1945 pseudo where it can't. */
1946 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1948 rtx loc, seq;
1949 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1950 #ifdef FRAME_GROWS_DOWNWARD
1951 int alignment
1952 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1953 / BITS_PER_UNIT);
1955 /* In this case, virtual_stack_vars_rtx points to one byte
1956 higher than the top of the frame area. So make sure we
1957 allocate a big enough chunk to keep the frame pointer
1958 aligned like a real one. */
1959 if (alignment)
1960 size = CEIL_ROUND (size, alignment);
1961 #endif
1962 start_sequence ();
1963 loc = assign_stack_temp (BLKmode, size, 1);
1964 loc = XEXP (loc, 0);
1965 #ifdef FRAME_GROWS_DOWNWARD
1966 /* In this case, virtual_stack_vars_rtx points to one byte
1967 higher than the top of the frame area. So compute the offset
1968 to one byte higher than our substitute frame. */
1969 loc = plus_constant (loc, size);
1970 #endif
1971 map->reg_map[regno] = temp
1972 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1974 #ifdef STACK_BOUNDARY
1975 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1976 #endif
1978 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1980 seq = get_insns ();
1981 end_sequence ();
1982 emit_insn_after (seq, map->insns_at_start);
1983 return temp;
1985 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1986 || (map->integrating
1987 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1988 == orig)))
1990 /* Do the same for a block to contain any arguments referenced
1991 in memory. */
1992 rtx loc, seq;
1993 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1995 start_sequence ();
1996 loc = assign_stack_temp (BLKmode, size, 1);
1997 loc = XEXP (loc, 0);
1998 /* When arguments grow downward, the virtual incoming
1999 args pointer points to the top of the argument block,
2000 so the remapped location better do the same. */
2001 #ifdef ARGS_GROW_DOWNWARD
2002 loc = plus_constant (loc, size);
2003 #endif
2004 map->reg_map[regno] = temp
2005 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2007 #ifdef STACK_BOUNDARY
2008 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
2009 #endif
2011 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
2013 seq = get_insns ();
2014 end_sequence ();
2015 emit_insn_after (seq, map->insns_at_start);
2016 return temp;
2018 else if (REG_FUNCTION_VALUE_P (orig))
2020 /* This is a reference to the function return value. If
2021 the function doesn't have a return value, error. If the
2022 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
2023 if (map->inline_target == 0)
2025 if (rtx_equal_function_value_matters)
2026 /* This is an ignored return value. We must not
2027 leave it in with REG_FUNCTION_VALUE_P set, since
2028 that would confuse subsequent inlining of the
2029 current function into a later function. */
2030 return gen_rtx_REG (GET_MODE (orig), regno);
2031 else
2032 /* Must be unrolling loops or replicating code if we
2033 reach here, so return the register unchanged. */
2034 return orig;
2036 else if (GET_MODE (map->inline_target) != BLKmode
2037 && mode != GET_MODE (map->inline_target))
2038 return gen_lowpart (mode, map->inline_target);
2039 else
2040 return map->inline_target;
2042 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2043 /* If leaf_renumber_regs_insn() might remap this register to
2044 some other number, make sure we don't share it with the
2045 inlined function, otherwise delayed optimization of the
2046 inlined function may change it in place, breaking our
2047 reference to it. We may still share it within the
2048 function, so create an entry for this register in the
2049 reg_map. */
2050 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2051 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2053 if (!map->leaf_reg_map[regno][mode])
2054 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2055 return map->leaf_reg_map[regno][mode];
2057 #endif
2058 else
2059 return orig;
2061 abort ();
2063 if (map->reg_map[regno] == NULL)
2065 map->reg_map[regno] = gen_reg_rtx (mode);
2066 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2067 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2068 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2069 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2071 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2072 mark_reg_pointer (map->reg_map[regno],
2073 map->regno_pointer_align[regno]);
2075 return map->reg_map[regno];
2077 case SUBREG:
2078 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2079 return simplify_gen_subreg (GET_MODE (orig), copy,
2080 GET_MODE (SUBREG_REG (orig)),
2081 SUBREG_BYTE (orig));
2083 case ADDRESSOF:
2084 copy = gen_rtx_ADDRESSOF (mode,
2085 copy_rtx_and_substitute (XEXP (orig, 0),
2086 map, for_lhs),
2087 0, ADDRESSOF_DECL (orig));
2088 regno = ADDRESSOF_REGNO (orig);
2089 if (map->reg_map[regno])
2090 regno = REGNO (map->reg_map[regno]);
2091 else if (regno > LAST_VIRTUAL_REGISTER)
2093 temp = XEXP (orig, 0);
2094 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2095 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2096 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2097 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2098 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2100 /* Objects may initially be represented as registers, but
2101 may be turned into a MEM if their address is taken by
2102 put_var_into_stack. Therefore, the register table may have
2103 entries which are MEMs.
2105 We briefly tried to clear such entries, but that ended up
2106 cascading into many changes due to the optimizers not being
2107 prepared for empty entries in the register table. So we've
2108 decided to allow the MEMs in the register table for now. */
2109 if (REG_P (map->x_regno_reg_rtx[regno])
2110 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2111 mark_reg_pointer (map->reg_map[regno],
2112 map->regno_pointer_align[regno]);
2113 regno = REGNO (map->reg_map[regno]);
2115 ADDRESSOF_REGNO (copy) = regno;
2116 return copy;
2118 case USE:
2119 case CLOBBER:
2120 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2121 to (use foo) if the original insn didn't have a subreg.
2122 Removing the subreg distorts the VAX movstrhi pattern
2123 by changing the mode of an operand. */
2124 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2125 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2126 copy = SUBREG_REG (copy);
2127 return gen_rtx_fmt_e (code, VOIDmode, copy);
2129 /* We need to handle "deleted" labels that appear in the DECL_RTL
2130 of a LABEL_DECL. */
2131 case NOTE:
2132 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2133 break;
2135 /* ... FALLTHRU ... */
2136 case CODE_LABEL:
2137 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2138 = LABEL_PRESERVE_P (orig);
2139 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2141 case LABEL_REF:
2142 copy
2143 = gen_rtx_LABEL_REF
2144 (mode,
2145 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2146 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2148 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2150 /* The fact that this label was previously nonlocal does not mean
2151 it still is, so we must check if it is within the range of
2152 this function's labels. */
2153 LABEL_REF_NONLOCAL_P (copy)
2154 = (LABEL_REF_NONLOCAL_P (orig)
2155 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2156 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2158 /* If we have made a nonlocal label local, it means that this
2159 inlined call will be referring to our nonlocal goto handler.
2160 So make sure we create one for this block; we normally would
2161 not since this is not otherwise considered a "call". */
2162 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2163 function_call_count++;
2165 return copy;
2167 case PC:
2168 case CC0:
2169 case CONST_INT:
2170 case CONST_VECTOR:
2171 return orig;
2173 case SYMBOL_REF:
2174 /* Symbols which represent the address of a label stored in the constant
2175 pool must be modified to point to a constant pool entry for the
2176 remapped label. Otherwise, symbols are returned unchanged. */
2177 if (CONSTANT_POOL_ADDRESS_P (orig))
2179 struct function *f = inlining ? inlining : cfun;
2180 rtx constant = get_pool_constant_for_function (f, orig);
2181 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2182 if (inlining)
2184 rtx temp = force_const_mem (const_mode,
2185 copy_rtx_and_substitute (constant,
2186 map, 0));
2188 #if 0
2189 /* Legitimizing the address here is incorrect.
2191 Since we had a SYMBOL_REF before, we can assume it is valid
2192 to have one in this position in the insn.
2194 Also, change_address may create new registers. These
2195 registers will not have valid reg_map entries. This can
2196 cause try_constants() to fail because it assumes that all
2197 registers in the rtx have valid reg_map entries, and it may
2198 end up replacing one of these new registers with junk. */
2200 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2201 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2202 #endif
2204 temp = XEXP (temp, 0);
2206 #ifdef POINTERS_EXTEND_UNSIGNED
2207 if (GET_MODE (temp) != GET_MODE (orig))
2208 temp = convert_memory_address (GET_MODE (orig), temp);
2209 #endif
2210 return temp;
2212 else if (GET_CODE (constant) == LABEL_REF)
2213 return XEXP (force_const_mem
2214 (GET_MODE (orig),
2215 copy_rtx_and_substitute (constant, map, for_lhs)),
2216 0);
2219 return orig;
2221 case CONST_DOUBLE:
2222 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2223 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2224 duplicate of a CONST_DOUBLE we have already seen. */
2225 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2227 REAL_VALUE_TYPE d;
2229 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2230 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2232 else
2233 return immed_double_const (CONST_DOUBLE_LOW (orig),
2234 CONST_DOUBLE_HIGH (orig), VOIDmode);
2236 case CONST:
2237 /* Make new constant pool entry for a constant
2238 that was in the pool of the inline function. */
2239 if (RTX_INTEGRATED_P (orig))
2240 abort ();
2241 break;
2243 case ASM_OPERANDS:
2244 /* If a single asm insn contains multiple output operands then
2245 it contains multiple ASM_OPERANDS rtx's that share the input
2246 and constraint vecs. We must make sure that the copied insn
2247 continues to share them. */
2248 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2250 copy = rtx_alloc (ASM_OPERANDS);
2251 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2252 PUT_MODE (copy, GET_MODE (orig));
2253 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2254 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2255 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2256 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2257 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2258 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2259 = map->copy_asm_constraints_vector;
2260 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2261 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2262 return copy;
2264 break;
2266 case CALL:
2267 /* This is given special treatment because the first
2268 operand of a CALL is a (MEM ...) which may get
2269 forced into a register for cse. This is undesirable
2270 if function-address cse isn't wanted or if we won't do cse. */
2271 #ifndef NO_FUNCTION_CSE
2272 if (! (optimize && ! flag_no_function_cse))
2273 #endif
2275 rtx copy
2276 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2277 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2278 map, 0));
2280 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2282 return
2283 gen_rtx_CALL (GET_MODE (orig), copy,
2284 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2286 break;
2288 #if 0
2289 /* Must be ifdefed out for loop unrolling to work. */
2290 case RETURN:
2291 abort ();
2292 #endif
2294 case SET:
2295 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2296 Adjust the setting by the offset of the area we made.
2297 If the nonlocal goto is into the current function,
2298 this will result in unnecessarily bad code, but should work. */
2299 if (SET_DEST (orig) == virtual_stack_vars_rtx
2300 || SET_DEST (orig) == virtual_incoming_args_rtx)
2302 /* In case a translation hasn't occurred already, make one now. */
2303 rtx equiv_reg;
2304 rtx equiv_loc;
2305 HOST_WIDE_INT loc_offset;
2307 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2308 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2309 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2310 REGNO (equiv_reg)).rtx;
2311 loc_offset
2312 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2314 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2315 force_operand
2316 (plus_constant
2317 (copy_rtx_and_substitute (SET_SRC (orig),
2318 map, 0),
2319 - loc_offset),
2320 NULL_RTX));
2322 else
2323 return gen_rtx_SET (VOIDmode,
2324 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2325 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2326 break;
2328 case MEM:
2329 if (inlining
2330 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2331 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2333 enum machine_mode const_mode
2334 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2335 rtx constant
2336 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2338 constant = copy_rtx_and_substitute (constant, map, 0);
2340 /* If this was an address of a constant pool entry that itself
2341 had to be placed in the constant pool, it might not be a
2342 valid address. So the recursive call might have turned it
2343 into a register. In that case, it isn't a constant any
2344 more, so return it. This has the potential of changing a
2345 MEM into a REG, but we'll assume that it is safe. */
2346 if (! CONSTANT_P (constant))
2347 return constant;
2349 return validize_mem (force_const_mem (const_mode, constant));
2352 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2353 map, 0));
2354 MEM_COPY_ATTRIBUTES (copy, orig);
2356 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2357 since this may be an indirect reference to a parameter and the
2358 actual may not be readonly. */
2359 if (inlining && !for_lhs)
2360 RTX_UNCHANGING_P (copy) = 0;
2362 /* If inlining, squish aliasing data that references the subroutine's
2363 parameter list, since that's no longer applicable. */
2364 if (inlining && MEM_EXPR (copy)
2365 && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2366 && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2367 set_mem_expr (copy, NULL_TREE);
2369 return copy;
2371 default:
2372 break;
2375 copy = rtx_alloc (code);
2376 PUT_MODE (copy, mode);
2377 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2378 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2379 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2381 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2383 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2385 switch (*format_ptr++)
2387 case '0':
2388 /* Copy this through the wide int field; that's safest. */
2389 X0WINT (copy, i) = X0WINT (orig, i);
2390 break;
2392 case 'e':
2393 XEXP (copy, i)
2394 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2395 break;
2397 case 'u':
2398 /* Change any references to old-insns to point to the
2399 corresponding copied insns. */
2400 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2401 break;
2403 case 'E':
2404 XVEC (copy, i) = XVEC (orig, i);
2405 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2407 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2408 for (j = 0; j < XVECLEN (copy, i); j++)
2409 XVECEXP (copy, i, j)
2410 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2411 map, for_lhs);
2413 break;
2415 case 'w':
2416 XWINT (copy, i) = XWINT (orig, i);
2417 break;
2419 case 'i':
2420 XINT (copy, i) = XINT (orig, i);
2421 break;
2423 case 's':
2424 XSTR (copy, i) = XSTR (orig, i);
2425 break;
2427 case 't':
2428 XTREE (copy, i) = XTREE (orig, i);
2429 break;
2431 default:
2432 abort ();
2436 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2438 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2439 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2440 map->copy_asm_constraints_vector
2441 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2444 return copy;
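/* Illustration only, not built: the format-driven traversal used by the
   default case above.  GET_RTX_FORMAT describes each operand, so a
   generic walk needs only the 'e' (expression) and 'E' (vector) cases;
   this sketch just counts subexpressions.  */
#if 0
static int
count_subexpressions (x)
     rtx x;
{
  int i, j, n = 0;
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
    if (fmt[i] == 'e' && XEXP (x, i) != 0)
      n += 1 + count_subexpressions (XEXP (x, i));
    else if (fmt[i] == 'E' && XVEC (x, i) != NULL)
      for (j = 0; j < XVECLEN (x, i); j++)
        n += 1 + count_subexpressions (XVECEXP (x, i, j));
  return n;
}
#endif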
2447 /* Substitute known constant values into INSN, if that is valid. */
2449 void
2450 try_constants (insn, map)
2451 rtx insn;
2452 struct inline_remap *map;
2454 int i;
2456 map->num_sets = 0;
2458 /* First try just updating addresses, then other things. This is
2459 important when we have something like the store of a constant
2460 into memory and we can update the memory address but the machine
2461 does not support a constant source. */
2462 subst_constants (&PATTERN (insn), insn, map, 1);
2463 apply_change_group ();
2464 subst_constants (&PATTERN (insn), insn, map, 0);
2465 apply_change_group ();
2467 /* Show we don't know the value of anything stored or clobbered. */
2468 note_stores (PATTERN (insn), mark_stores, NULL);
2469 map->last_pc_value = 0;
2470 #ifdef HAVE_cc0
2471 map->last_cc0_value = 0;
2472 #endif
2474 /* Set up any constant equivalences made in this insn. */
2475 for (i = 0; i < map->num_sets; i++)
2477 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2479 int regno = REGNO (map->equiv_sets[i].dest);
2481 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2482 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2483 /* The following clause is a hack to make the case work where GNU C++
2484 reassigns a variable to make cse work right. */
2485 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2486 regno).rtx,
2487 map->equiv_sets[i].equiv))
2488 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2489 map->equiv_sets[i].equiv, map->const_age);
2491 else if (map->equiv_sets[i].dest == pc_rtx)
2492 map->last_pc_value = map->equiv_sets[i].equiv;
2493 #ifdef HAVE_cc0
2494 else if (map->equiv_sets[i].dest == cc0_rtx)
2495 map->last_cc0_value = map->equiv_sets[i].equiv;
2496 #endif
2500 /* Substitute known constants for pseudo regs in the contents of LOC,
2501 which are part of INSN.
2502 If INSN is zero, the substitution should always be done (this is used to
2503 update DECL_RTL).
2504 These changes are taken out by try_constants if the result is not valid.
2506 Note that we are more concerned with determining when the result of a SET
2507 is a constant, for further propagation, than actually inserting constants
2508 into insns; cse will do the latter task better.
2510 This function is also used to adjust address of items previously addressed
2511 via the virtual stack variable or virtual incoming arguments registers.
2513 If MEMONLY is nonzero, only make changes inside a MEM. */
2515 static void
2516 subst_constants (loc, insn, map, memonly)
2517 rtx *loc;
2518 rtx insn;
2519 struct inline_remap *map;
2520 int memonly;
2522 rtx x = *loc;
2523 int i, j;
2524 enum rtx_code code;
2525 const char *format_ptr;
2526 int num_changes = num_validated_changes ();
2527 rtx new = 0;
2528 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2530 code = GET_CODE (x);
2532 switch (code)
2534 case PC:
2535 case CONST_INT:
2536 case CONST_DOUBLE:
2537 case CONST_VECTOR:
2538 case SYMBOL_REF:
2539 case CONST:
2540 case LABEL_REF:
2541 case ADDRESS:
2542 return;
2544 #ifdef HAVE_cc0
2545 case CC0:
2546 if (! memonly)
2547 validate_change (insn, loc, map->last_cc0_value, 1);
2548 return;
2549 #endif
2551 case USE:
2552 case CLOBBER:
2553 /* The only thing we can do with a USE or CLOBBER is possibly do
2554 some substitutions in a MEM within it. */
2555 if (GET_CODE (XEXP (x, 0)) == MEM)
2556 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2557 return;
2559 case REG:
2560 /* Substitute for parms and known constants. Don't replace
2561 hard regs used as user variables with constants. */
2562 if (! memonly)
2564 int regno = REGNO (x);
2565 struct const_equiv_data *p;
2567 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2568 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2569 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2570 p->rtx != 0)
2571 && p->age >= map->const_age)
2572 validate_change (insn, loc, p->rtx, 1);
2574 return;
2576 case SUBREG:
2577 /* SUBREG applied to something other than a reg
2578 should be treated as ordinary, since that must
2579 be a special hack and we don't know how to treat it specially.
2580 Consider for example mulsidi3 in m68k.md.
2581 Ordinary SUBREG of a REG needs this special treatment. */
2582 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2584 rtx inner = SUBREG_REG (x);
2585 rtx new = 0;
2587 /* We can't call subst_constants on &SUBREG_REG (x) because any
2588 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2589 see what is inside, try to form the new SUBREG and see if that is
2590 valid. We handle two cases: extracting a full word in an
2591 integral mode and extracting the low part. */
2592 subst_constants (&inner, NULL_RTX, map, 0);
2593 new = simplify_gen_subreg (GET_MODE (x), inner,
2594 GET_MODE (SUBREG_REG (x)),
2595 SUBREG_BYTE (x));
2597 if (new)
2598 validate_change (insn, loc, new, 1);
2599 else
2600 cancel_changes (num_changes);
2602 return;
2604 break;
2606 case MEM:
2607 subst_constants (&XEXP (x, 0), insn, map, 0);
2609 /* If a memory address got spoiled, change it back. */
2610 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2611 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2612 cancel_changes (num_changes);
2613 return;
2615 case SET:
2617 /* Substitute constants in our source, and in any arguments to a
2618 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2619 itself. */
2620 rtx *dest_loc = &SET_DEST (x);
2621 rtx dest = *dest_loc;
2622 rtx src, tem;
2623 enum machine_mode compare_mode = VOIDmode;
2625 /* If SET_SRC is a COMPARE which subst_constants would turn into
2626 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2627 is to be done. */
2628 if (GET_CODE (SET_SRC (x)) == COMPARE)
2630 src = SET_SRC (x);
2631 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2632 || CC0_P (dest))
2634 compare_mode = GET_MODE (XEXP (src, 0));
2635 if (compare_mode == VOIDmode)
2636 compare_mode = GET_MODE (XEXP (src, 1));
2640 subst_constants (&SET_SRC (x), insn, map, memonly);
2641 src = SET_SRC (x);
2643 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2644 || GET_CODE (*dest_loc) == SUBREG
2645 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2647 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2649 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2650 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2652 dest_loc = &XEXP (*dest_loc, 0);
2655 /* Do substitute in the address of a destination in memory. */
2656 if (GET_CODE (*dest_loc) == MEM)
2657 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2659 /* Check for the case where DEST is a SUBREG, both it and the underlying
2660 register are no wider than one word, and the SUBREG has the wider mode.
2661 In that case, we are really setting the underlying register to the
2662 source converted to the mode of DEST. So indicate that. */
2663 if (GET_CODE (dest) == SUBREG
2664 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2665 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2666 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2667 <= GET_MODE_SIZE (GET_MODE (dest)))
2668 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2669 src)))
2670 src = tem, dest = SUBREG_REG (dest);
2672 /* If storing a recognizable value save it for later recording. */
2673 if ((map->num_sets < MAX_RECOG_OPERANDS)
2674 && (CONSTANT_P (src)
2675 || (GET_CODE (src) == REG
2676 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2677 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2678 || (GET_CODE (src) == PLUS
2679 && GET_CODE (XEXP (src, 0)) == REG
2680 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2681 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2682 && CONSTANT_P (XEXP (src, 1)))
2683 || GET_CODE (src) == COMPARE
2684 || CC0_P (dest)
2685 || (dest == pc_rtx
2686 && (src == pc_rtx || GET_CODE (src) == RETURN
2687 || GET_CODE (src) == LABEL_REF))))
2689 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2690 it will cause us to save the COMPARE with any constants
2691 substituted, which is what we want for later. */
2692 rtx src_copy = copy_rtx (src);
2693 map->equiv_sets[map->num_sets].equiv = src_copy;
2694 map->equiv_sets[map->num_sets++].dest = dest;
2695 if (compare_mode != VOIDmode
2696 && GET_CODE (src) == COMPARE
2697 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2698 || CC0_P (dest))
2699 && GET_MODE (XEXP (src, 0)) == VOIDmode
2700 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2702 map->compare_src = src_copy;
2703 map->compare_mode = compare_mode;
2707 return;
2709 default:
2710 break;
2713 format_ptr = GET_RTX_FORMAT (code);
2715 /* If the first operand is an expression, save its mode for later. */
2716 if (*format_ptr == 'e')
2717 op0_mode = GET_MODE (XEXP (x, 0));
2719 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2721 switch (*format_ptr++)
2723 case '0':
2724 break;
2726 case 'e':
2727 if (XEXP (x, i))
2728 subst_constants (&XEXP (x, i), insn, map, memonly);
2729 break;
2731 case 'u':
2732 case 'i':
2733 case 's':
2734 case 'w':
2735 case 'n':
2736 case 't':
2737 case 'B':
2738 break;
2740 case 'E':
2741 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2742 for (j = 0; j < XVECLEN (x, i); j++)
2743 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2745 break;
2747 default:
2748 abort ();
2752 /* If this is a commutative operation, move a constant to the second
2753 operand unless the second operand is already a CONST_INT. */
2754 if (! memonly
2755 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2756 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2758 rtx tem = XEXP (x, 0);
2759 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2760 validate_change (insn, &XEXP (x, 1), tem, 1);
2763 /* Simplify the expression in case we put in some constants. */
2764 if (! memonly)
2765 switch (GET_RTX_CLASS (code))
2767 case '1':
2768 if (op0_mode == MAX_MACHINE_MODE)
2769 abort ();
2770 new = simplify_unary_operation (code, GET_MODE (x),
2771 XEXP (x, 0), op0_mode);
2772 break;
2774 case '<':
2776 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2778 if (op_mode == VOIDmode)
2779 op_mode = GET_MODE (XEXP (x, 1));
2780 new = simplify_relational_operation (code, op_mode,
2781 XEXP (x, 0), XEXP (x, 1));
2782 #ifdef FLOAT_STORE_FLAG_VALUE
2783 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2785 enum machine_mode mode = GET_MODE (x);
2786 if (new == const0_rtx)
2787 new = CONST0_RTX (mode);
2788 else
2790 REAL_VALUE_TYPE val;
2792 /* Avoid automatic aggregate initialization. */
2793 val = FLOAT_STORE_FLAG_VALUE (mode);
2794 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2797 #endif
2798 break;
2801 case '2':
2802 case 'c':
2803 new = simplify_binary_operation (code, GET_MODE (x),
2804 XEXP (x, 0), XEXP (x, 1));
2805 break;
2807 case 'b':
2808 case '3':
2809 if (op0_mode == MAX_MACHINE_MODE)
2810 abort ();
2812 if (code == IF_THEN_ELSE)
2814 rtx op0 = XEXP (x, 0);
2816 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2817 && GET_MODE (op0) == VOIDmode
2818 && ! side_effects_p (op0)
2819 && XEXP (op0, 0) == map->compare_src
2820 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2822 /* We have a compare of two VOIDmode constants for which
2823 we recorded the comparison mode. */
2824 rtx temp =
2825 simplify_relational_operation (GET_CODE (op0),
2826 map->compare_mode,
2827 XEXP (op0, 0),
2828 XEXP (op0, 1));
2830 if (temp == const0_rtx)
2831 new = XEXP (x, 2);
2832 else if (temp == const1_rtx)
2833 new = XEXP (x, 1);
2836 if (!new)
2837 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2838 XEXP (x, 0), XEXP (x, 1),
2839 XEXP (x, 2));
2840 break;
2843 if (new)
2844 validate_change (insn, loc, new, 1);
2847 /* Show that registers modified no longer contain known constants. We are
2848 called from note_stores with parts of the new insn. */
2850 static void
2851 mark_stores (dest, x, data)
2852 rtx dest;
2853 rtx x ATTRIBUTE_UNUSED;
2854 void *data ATTRIBUTE_UNUSED;
2856 int regno = -1;
2857 enum machine_mode mode = VOIDmode;
2859 /* DEST is always the innermost thing set, except in the case of
2860 SUBREGs of hard registers. */
2862 if (GET_CODE (dest) == REG)
2863 regno = REGNO (dest), mode = GET_MODE (dest);
2864 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2866 regno = REGNO (SUBREG_REG (dest));
2867 if (regno < FIRST_PSEUDO_REGISTER)
2868 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2869 GET_MODE (SUBREG_REG (dest)),
2870 SUBREG_BYTE (dest),
2871 GET_MODE (dest));
2872 mode = GET_MODE (SUBREG_REG (dest));
2875 if (regno >= 0)
2877 unsigned int uregno = regno;
2878 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2879 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2880 unsigned int i;
2882 /* Ignore virtual stack var or virtual arg register since those
2883 are handled separately. */
2884 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2885 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2886 for (i = uregno; i <= last_reg; i++)
2887 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2888 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
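/* Illustration only, not built: the hard-register span computation used
   above.  A hard register holding a multi-word mode occupies
   HARD_REGNO_NREGS consecutive registers, all of which must lose their
   recorded equivalences; pseudos always span exactly one entry.  */
#if 0
static unsigned int
last_regno_of_store (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    return regno;
  return regno + HARD_REGNO_NREGS (regno, mode) - 1;
}
#endif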
2892 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2893 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2894 that it points to the node itself, thus indicating that the node is its
2895 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2896 the given node is NULL, recursively descend the decl/block tree which
2897 it is the root of, and for each other ..._DECL or BLOCK node contained
2898 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2899 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2900 values to point to themselves. */
2902 static void
2903 set_block_origin_self (stmt)
2904 tree stmt;
2906 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2908 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2911 tree local_decl;
2913 for (local_decl = BLOCK_VARS (stmt);
2914 local_decl != NULL_TREE;
2915 local_decl = TREE_CHAIN (local_decl))
2916 set_decl_origin_self (local_decl); /* Potential recursion. */
2920 tree subblock;
2922 for (subblock = BLOCK_SUBBLOCKS (stmt);
2923 subblock != NULL_TREE;
2924 subblock = BLOCK_CHAIN (subblock))
2925 set_block_origin_self (subblock); /* Recurse. */
2930 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2931 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2932 node so that it points to the node itself, thus indicating that the
2933 node represents its own (abstract) origin. Additionally, if the
2934 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2935 the decl/block tree of which the given node is the root, and for
2936 each other ..._DECL or BLOCK node contained therein whose
2937 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2938 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2939 point to themselves. */
2941 void
2942 set_decl_origin_self (decl)
2943 tree decl;
2945 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2947 DECL_ABSTRACT_ORIGIN (decl) = decl;
2948 if (TREE_CODE (decl) == FUNCTION_DECL)
2950 tree arg;
2952 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2953 DECL_ABSTRACT_ORIGIN (arg) = arg;
2954 if (DECL_INITIAL (decl) != NULL_TREE
2955 && DECL_INITIAL (decl) != error_mark_node)
2956 set_block_origin_self (DECL_INITIAL (decl));
2961 /* Given a pointer to some BLOCK node, and a boolean value to set the
2962 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2963 the given block, and for all local decls and all local sub-blocks
2964 (recursively) which are contained therein. */
2966 static void
2967 set_block_abstract_flags (stmt, setting)
2968 tree stmt;
2969 int setting;
2971 tree local_decl;
2972 tree subblock;
2974 BLOCK_ABSTRACT (stmt) = setting;
2976 for (local_decl = BLOCK_VARS (stmt);
2977 local_decl != NULL_TREE;
2978 local_decl = TREE_CHAIN (local_decl))
2979 set_decl_abstract_flags (local_decl, setting);
2981 for (subblock = BLOCK_SUBBLOCKS (stmt);
2982 subblock != NULL_TREE;
2983 subblock = BLOCK_CHAIN (subblock))
2984 set_block_abstract_flags (subblock, setting);
2987 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2988 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2989 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2990 set the abstract flags for all of the parameters, local vars, local
2991 blocks and sub-blocks (recursively) to the same setting. */
2993 void
2994 set_decl_abstract_flags (decl, setting)
2995 tree decl;
2996 int setting;
2998 DECL_ABSTRACT (decl) = setting;
2999 if (TREE_CODE (decl) == FUNCTION_DECL)
3001 tree arg;
3003 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3004 DECL_ABSTRACT (arg) = setting;
3005 if (DECL_INITIAL (decl) != NULL_TREE
3006 && DECL_INITIAL (decl) != error_mark_node)
3007 set_block_abstract_flags (DECL_INITIAL (decl), setting);
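/* Usage sketch, not built: callers would typically set the abstract
   flags around emitting an abstract instance and then restore them,
   for example in a debug-info back end.  */
#if 0
  set_decl_abstract_flags (fndecl, 1);
  /* ... describe the abstract instance of FNDECL ... */
  set_decl_abstract_flags (fndecl, 0);
#endif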
3011 /* Output the assembly language code for the function FNDECL
3012 from its DECL_SAVED_INSNS. Used for inline functions that are output
3013 at end of compilation instead of where they came in the source. */
3015 static GTY(()) struct function *old_cfun;
3017 void
3018 output_inline_function (fndecl)
3019 tree fndecl;
3021 enum debug_info_type old_write_symbols = write_symbols;
3022 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
3023 struct function *f = DECL_SAVED_INSNS (fndecl);
3025 old_cfun = cfun;
3026 cfun = f;
3027 current_function_decl = fndecl;
3029 set_new_last_label_num (f->inl_max_label_num);
3031 /* We're not deferring this any longer. */
3032 DECL_DEFER_OUTPUT (fndecl) = 0;
3034 /* If requested, suppress debugging information. */
3035 if (f->no_debugging_symbols)
3037 write_symbols = NO_DEBUG;
3038 debug_hooks = &do_nothing_debug_hooks;
3041 /* Make sure warnings emitted by the optimizers (e.g. control reaches
3042 end of non-void function) are not wildly incorrect. */
3043 input_filename = DECL_SOURCE_FILE (fndecl);
3044 lineno = DECL_SOURCE_LINE (fndecl);
3046 /* Compile this function all the way down to assembly code. As a
3047 side effect this destroys the saved RTL representation, but
3048 that's okay, because we don't need to inline this anymore. */
3049 rest_of_compilation (fndecl);
3050 DECL_INLINE (fndecl) = 0;
3052 cfun = old_cfun;
3053 current_function_decl = old_cfun ? old_cfun->decl : 0;
3054 write_symbols = old_write_symbols;
3055 debug_hooks = old_debug_hooks;
3059 /* Functions to keep track of the values hard regs had at the start of
3060 the function. */
3062 rtx
3063 get_hard_reg_initial_reg (fun, reg)
3064 struct function *fun;
3065 rtx reg;
3067 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3068 int i;
3070 if (ivs == 0)
3071 return NULL_RTX;
3073 for (i = 0; i < ivs->num_entries; i++)
3074 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3075 return ivs->entries[i].hard_reg;
3077 return NULL_RTX;
3080 rtx
3081 has_func_hard_reg_initial_val (fun, reg)
3082 struct function *fun;
3083 rtx reg;
3085 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3086 int i;
3088 if (ivs == 0)
3089 return NULL_RTX;
3091 for (i = 0; i < ivs->num_entries; i++)
3092 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3093 return ivs->entries[i].pseudo;
3095 return NULL_RTX;
3098 rtx
3099 get_func_hard_reg_initial_val (fun, reg)
3100 struct function *fun;
3101 rtx reg;
3103 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3104 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3106 if (rv)
3107 return rv;
3109 if (ivs == 0)
3111 fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
3112 ivs = fun->hard_reg_initial_vals;
3113 ivs->num_entries = 0;
3114 ivs->max_entries = 5;
3115 ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
3118 if (ivs->num_entries >= ivs->max_entries)
3120 ivs->max_entries += 5;
3121 ivs->entries =
3122 (initial_value_pair *) ggc_realloc (ivs->entries,
3123 ivs->max_entries
3124 * sizeof (initial_value_pair));
3127 ivs->entries[ivs->num_entries].hard_reg = reg;
3128 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3130 return ivs->entries[ivs->num_entries++].pseudo;
3133 rtx
3134 get_hard_reg_initial_val (mode, regno)
3135 enum machine_mode mode;
3136 int regno;
3138 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3141 rtx
3142 has_hard_reg_initial_val (mode, regno)
3143 enum machine_mode mode;
3144 int regno;
3146 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
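/* Usage sketch, not built: a back end that needs the entry value of a
   hard register asks for a pseudo once and reuses it; the copy insns
   are emitted later by emit_initial_value_sets.  The register choice
   here is a hypothetical example.  */
#if 0
  rtx entry_sp = get_hard_reg_initial_val (Pmode, STACK_POINTER_REGNUM);
#endif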
3149 static void
3150 setup_initial_hard_reg_value_integration (inl_f, remap)
3151 struct function *inl_f;
3152 struct inline_remap *remap;
3154 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3155 int i;
3157 if (ivs == 0)
3158 return;
3160 for (i = 0; i < ivs->num_entries; i ++)
3161 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3162 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3166 void
3167 emit_initial_value_sets ()
3169 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3170 int i;
3171 rtx seq;
3173 if (ivs == 0)
3174 return;
3176 start_sequence ();
3177 for (i = 0; i < ivs->num_entries; i++)
3178 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3179 seq = get_insns ();
3180 end_sequence ();
3182 emit_insn_after (seq, get_insns ());
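/* Illustration only, not built: the sequence idiom used above.  Insns
   are collected off to the side and then spliced in after a chosen
   point; the names dest, src and after_insn are placeholders.  */
#if 0
  rtx seq;
  start_sequence ();
  emit_move_insn (dest, src);
  seq = get_insns ();
  end_sequence ();
  emit_insn_after (seq, after_insn);
#endif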
3185 /* If the backend knows where to allocate pseudos for hard
3186 register initial values, register these allocations now. */
3187 void
3188 allocate_initial_values (reg_equiv_memory_loc)
3189 rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3191 #ifdef ALLOCATE_INITIAL_VALUE
3192 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3193 int i;
3195 if (ivs == 0)
3196 return;
3198 for (i = 0; i < ivs->num_entries; i++)
3200 int regno = REGNO (ivs->entries[i].pseudo);
3201 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3203 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3204 ; /* Do nothing. */
3205 else if (GET_CODE (x) == MEM)
3206 reg_equiv_memory_loc[regno] = x;
3207 else if (GET_CODE (x) == REG)
3209 reg_renumber[regno] = REGNO (x);
3210 /* Poke the regno right into regno_reg_rtx
3211 so that even fixed regs are accepted. */
3212 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3214 else abort ();
3216 #endif
3219 #include "gt-integrate.h"