/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"
/* Round VALUE up to the next integer that meets the alignment ALIGN
   (assumed to be a power of two).  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
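
/* Illustrative example (editorial, not from the original source), with a
   power-of-two alignment:

     CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16
     CEIL_ROUND (16, 8) == (16 + 7) & ~7 == 16  */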
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
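
/* Illustrative example (editorial, not from the original source): for a
   function with 3 arguments,
     optimize_size:  1 + (3 * 3) / 2  ==  5 insns,
     otherwise:      8 * (8 + 3)      == 88 insns.  */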

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));

static rtvec initialize_for_inline      PARAMS ((tree));
static void note_modified_parmregs      PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls        PARAMS ((tree, struct inline_remap *,
                                                 rtvec));
static tree integrate_decl_tree         PARAMS ((tree,
                                                 struct inline_remap *));
static void subst_constants             PARAMS ((rtx *, rtx,
                                                 struct inline_remap *, int));
static void set_block_origin_self       PARAMS ((tree));
static void set_block_abstract_flags    PARAMS ((tree, int));
static void process_reg_param           PARAMS ((struct inline_remap *, rtx,
                                                 rtx));
void set_decl_abstract_flags            PARAMS ((tree, int));
static void mark_stores                 PARAMS ((rtx, rtx, void *));
static void save_parm_insns             PARAMS ((rtx, rtx));
static void copy_insn_list              PARAMS ((rtx, struct inline_remap *,
                                                 rtx));
static void copy_insn_notes             PARAMS ((rtx, struct inline_remap *,
                                                 int));
static int compare_blocks               PARAMS ((const PTR, const PTR));
static int find_block                   PARAMS ((const PTR, const PTR));

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable points to the struct function for the function currently
   being inlined.  */
static struct function *inlining = 0;
107 /* Returns the Ith entry in the label_map contained in MAP. If the
108 Ith entry has not yet been set, return a fresh label. This function
109 performs a lazy initialization of label_map, thereby avoiding huge memory
110 explosions when the label_map gets very large. */
113 get_label_from_map (map, i)
114 struct inline_remap *map;
115 int i;
117 rtx x = map->label_map[i];
119 if (x == NULL_RTX)
120 x = map->label_map[i] = gen_label_rtx ();
122 return x;
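
/* Illustrative usage note (editorial, not from the original source): a
   copied label is created only when some insn actually refers to it, as
   in the CODE_LABEL case of copy_insn_list further down in this file:

     copy = emit_label (get_label_from_map (map, CODE_LABEL_NUMBER (insn)));
 */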

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */

bool
function_attribute_inlinable_p (fndecl)
     tree fndecl;
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
          tree name = TREE_PURPOSE (a);
          int i;

          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
            if (is_attribute_p (targetm.attribute_table[i].name, name))
              return (*targetm.function_attribute_inlinable_p) (fndecl);
        }
    }

  return true;
}

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     tree fndecl;
{
  rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline, increase the maximum size to
     MAX_INLINE_INSNS (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
                  ? (MAX_INLINE_INSNS
                     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
                  : INTEGRATE_THRESHOLD (fndecl);

  int ninsns = 0;
  tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if (last && TREE_VALUE (last) != void_type_node)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
               && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          ninsns++;

      if (ninsns >= max_insns)
        return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses
     of its local labels, which may be tucked into global storage, are of
     course not constant across instantiations, which causes unexpected
     behavior.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
        return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (!function_attribute_inlinable_p (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
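
/* Editorial sketch (not from this file; the real callers live elsewhere
   in the compiler): a caller checks the returned msgid and reports it,
   letting the diagnostic machinery substitute the function's name for
   the single %s, e.g.

     const char *msg = function_cannot_inline_p (fndecl);
     if (msg != NULL)
       warning_with_decl (fndecl, msg);

   assuming the warning_with_decl routine from toplev.c.  */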

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
         otherwise the copy_rtx call below will not unshare the MEM since
         it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
          && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
        p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
          && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
          && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
          && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
        {
          invisiref = 1;
          type = DECL_ARG_TYPE (decl);
        }
      else
        type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
         new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
        {
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
          TREE_READONLY (copy) = TREE_READONLY (decl);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
        }
      else
        {
          TREE_ADDRESSABLE (copy) = 0;
          TREE_READONLY (copy) = 1;
          TREE_THIS_VOLATILE (copy) = 0;
        }
    }
  else
    {
      copy = copy_node (decl);
      (*lang_hooks.dup_lang_specific_decl) (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
         address has been taken; it's for internal bookkeeping in
         expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
        TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */
  if (! flag_no_inline)
    parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  if (! flag_no_inline)
    argvec = initialize_for_inline (fndecl);
  else
    argvec = NULL;

  /* Delete the basic block notes created by an early run of
     find_basic_blocks.  If they were left in, a later run of
     find_basic_blocks would try to reuse, through those notes, memory
     for basic_block structures that lives on an already-freed obstack.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
      delete_related_insns (insn);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  if (! flag_no_inline)
    {
      /* Get the insn which signals the end of parameter setup code.  */
      first_nonparm_insn = get_first_nonparm_insn ();

      /* Now just scan the chain of insns to see what happens to our
         PARM_DECLs.  If a PARM_DECL is used but never modified, we
         can substitute its rtl directly when expanding inline (and
         perform constant folding when its incoming value is
         constant).  Otherwise, we have to copy its value into a new
         register and track the new register's life.  */
      in_nonparm_insns = 0;
      save_parm_insns (insn, first_nonparm_insn);

      cfun->inl_max_label_num = max_label_num ();
      cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
      cfun->original_arg_vector = argvec;
    }

  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  if (! flag_no_inline)
    free (parmdecl_map);
}

/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (INSN_P (insn))
        {
          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs, NULL);

          /* If this is a CALL_PLACEHOLDER insn then we need to look into the
             three attached sequences: normal call, sibling call and tail
             recursion.  */
          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              int i;

              for (i = 0; i < 3; i++)
                save_parm_insns (XEXP (PATTERN (insn), i),
                                 first_nonparm_insn);
            }
        }
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
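
/* Illustrative note (editorial, not from the original source):
   FIXED_BASE_PLUS_P matches the sum of a virtual register and a
   constant, e.g.

     (plus:SI (reg:SI virtual-stack-vars) (const_int 8))

   Addresses of this shape remain valid constant equivalences across
   the substitution done below.  */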

/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
          && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
          && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
        SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}

/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  We cannot just subtract the
   two pointers, because the difference may not fit in an int.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) b1;
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}
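
/* Illustrative note (editorial, not from the original source): the two
   comparators above are used as a pair later in this file.
   expand_inline_function first sorts the block map,

     qsort (&VARRAY_TREE (map->block_map, 0),
            map->block_map->elements_used, sizeof (tree), compare_blocks);

   and copy_insn_list then calls bsearch with find_block, whose first
   argument is an original BLOCK itself rather than a pointer into the
   sorted array.  */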

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
               ? NEXT_INSN (inl_f->inl_last_parm_insn)
               : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
  rtvec arg_vector = inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (size_t) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (arg == error_mark_node
          || mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode
              && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
        return (rtx) (size_t) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

          store_expr (arg, stack_slot, 0);
          arg_vals[i] = XEXP (stack_slot, 0);
          invisiref = 1;
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            {
              int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
              enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

              pmode = promote_mode (TREE_TYPE (formal), pmode,
                                    &unsignedp, 0);

              if (GET_MODE (loc) != pmode)
                abort ();

              /* The mode of LOC and ARG can differ if LOC was a variable
                 that had its mode promoted via PROMOTED_MODE.  */
              arg_vals[i] = convert_modes (pmode,
                                           TYPE_MODE (TREE_TYPE (arg)),
                                           expand_expr (arg, NULL_RTX, mode,
                                                        EXPAND_SUM),
                                           unsignedp);
            }
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && ! invisiref
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
          && POINTER_TYPE_P (TREE_TYPE (formal)))
        mark_reg_pointer (arg_vals[i],
                          TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */
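
  /* Illustrative sizing example (editorial, not from the original
     source): with max_reg_num () == 100, 50 pseudos in the called
     routine (max_regno - FIRST_PSEUDO_REGISTER == 50), and nargs == 2,
     the varray below gets 100 + 50 + 15 * 2 + 10 == 190 entries.  */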

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
                           (max_reg_num ()
                            + (max_regno - FIRST_PSEUDO_REGISTER)
                            + 15 * nargs
                            + 10),
                           "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  /* If the inlined function calls __builtin_constant_p, then we'll
     need to call purge_builtin_constant_p on this function.  */
  if (inl_f->calls_constant_p)
    current_function_calls_constant_p = 1;

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
                            NOTE_LINE_NUMBER (parm_insns));
      if (note)
        RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.  It
             will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             with the parameter now; we will call store_expr later.  In
             this case, however, we must ensure that the virtual stack and
             incoming arg rtx values are expanded now so that we can be
             sure we have enough slots in the const equiv map since the
             store_expr call can easily blow the size estimate.  */
          if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
            copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
        }
      else if (GET_CODE (loc) == REG)
        process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
        {
          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

          process_reg_param (map, locreal, copyreal);
          process_reg_param (map, locimag, copyimag);
        }
      else
        abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
          /* Exclude case handled above.  */
          && ! (GET_CODE (XEXP (loc, 0)) == REG
                && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
        {
          rtx note = emit_note (DECL_SOURCE_FILE (formal),
                                DECL_SOURCE_LINE (formal));
          if (note)
            RTX_INTEGRATED_P (note) = 1;

          /* Compute the address in the area we reserved and store the
             value there.  */
          temp = copy_rtx_and_substitute (loc, map, 1);
          subst_constants (&temp, NULL_RTX, map, 1);
          apply_change_group ();
          if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
            temp = change_address (temp, VOIDmode, XEXP (temp, 0));
          store_expr (arg_trees[i], temp, 0);
        }
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
         ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
        {
          temp = copy_rtx_and_substitute (loc, map, 1);
          subst_constants (&temp, NULL_RTX, map, 1);
          apply_change_group ();
          target = temp;
        }
      else
        {
          if (! structure_value_addr
              || ! aggregate_value_p (DECL_RESULT (fndecl)))
            abort ();

          /* Pass the function the address in which to return a structure
             value.  Note that a constructor can cause someone to call us
             with STRUCTURE_VALUE_ADDR, but the initialization takes place
             via the first parameter, rather than the struct return address.

             We have two cases:  If the address is a simple register
             indirect, use the mapping mechanism to point that register to
             our structure return address.  Otherwise, store the structure
             return value into the place that it will be referenced from.  */

          if (GET_CODE (XEXP (loc, 0)) == REG)
            {
              temp = force_operand (structure_value_addr, NULL_RTX);
              temp = force_reg (Pmode, temp);
              /* A virtual register might be invalid in an insn, because
                 it can cause trouble in reload.  Since we don't have access
                 to the expanders at map translation time, make sure we have
                 a proper register now.
                 If a virtual register is actually valid, cse or combine
                 can put it into the mapped insns.  */
              if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
                  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
                temp = copy_to_mode_reg (Pmode, temp);
              map->reg_map[REGNO (XEXP (loc, 0))] = temp;

              if (CONSTANT_P (structure_value_addr)
                  || GET_CODE (structure_value_addr) == ADDRESSOF
                  || (GET_CODE (structure_value_addr) == PLUS
                      && (XEXP (structure_value_addr, 0)
                          == virtual_stack_vars_rtx)
                      && (GET_CODE (XEXP (structure_value_addr, 1))
                          == CONST_INT)))
                {
                  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
                                        CONST_AGE_PARM);
                }
            }
          else
            {
              temp = copy_rtx_and_substitute (loc, map, 1);
              subst_constants (&temp, NULL_RTX, map, 0);
              apply_change_group ();
              emit_move_insn (temp, structure_value_addr);
            }
        }
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
         value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
         (for the sake of callers that fail to declare it right).
         We have to use the mode of the result's RTL, rather than
         its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        {
          /* Don't make BLKmode registers.  If this looks like
             a BLKmode object being returned in a register, get
             the mode from that, otherwise abort.  */
          if (departing_mode == BLKmode)
            {
              if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
                {
                  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
                  arriving_mode = departing_mode;
                }
              else
                abort ();
            }

          target = gen_reg_rtx (departing_mode);
        }

      /* If function's value was promoted before return,
         avoid machine mode mismatch when we substitute INLINE_TARGET.
         But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
        {
          /* Avoid creating a paradoxical subreg wider than
             BITS_PER_WORD, since that is illegal.  */
          if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
            {
              if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
                                          GET_MODE_BITSIZE (arriving_mode)))
                /* Maybe could be handled by using convert_move () ?  */
                abort ();
              reg_to_map = gen_reg_rtx (arriving_mode);
              target = gen_lowpart (departing_mode, reg_to_map);
            }
          else
            reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
        }
      else
        reg_to_map = target;

      /* Usually, the result value is the machine's return register.
         Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
        map->inline_target = reg_to_map;
      else
        map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else if (GET_CODE (loc) == CONCAT)
    {
      enum machine_mode departing_mode = TYPE_MODE (type);
      enum machine_mode arriving_mode
        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));

      if (departing_mode != arriving_mode)
        abort ();
      if (GET_CODE (XEXP (loc, 0)) != REG
          || GET_CODE (XEXP (loc, 1)) != REG)
        abort ();

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        target = gen_reg_rtx (departing_mode);

      if (GET_CODE (target) != CONCAT)
        abort ();

      map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
      map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
    }
  else
    abort ();

  /* Remap the exception handler data pointer from one to the other.  */
  temp = get_exception_pointer (inl_f);
  if (temp)
    map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
          (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
         map->block_map->elements_used,
         sizeof (tree),
         compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Map pseudos used for initial hard reg values.  */
  setup_initial_hard_reg_value_integration (inl_f, map);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode, since
       the superblocks have not been created yet.  */
    (*lang_hooks.decls.insert_block) (block);
  else
    {
      BLOCK_CHAIN (block)
        = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
                            memory_address (TYPE_MODE (type),
                                            structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is nonzero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtx static_chain_mem = 0;

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
        {
        case INSN:
          pattern = PATTERN (insn);
          set = single_set (insn);
          copy = 0;
          if (GET_CODE (pattern) == USE
              && GET_CODE (XEXP (pattern, 0)) == REG
              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            /* The (USE (REG n)) at return from the function should
               be ignored since we are changing (REG n) into
               inline_target.  */
            break;

          /* Ignore setting a function value that we don't want to use.  */
          if (map->inline_target == 0
              && set != 0
              && GET_CODE (SET_DEST (set)) == REG
              && REG_FUNCTION_VALUE_P (SET_DEST (set)))
            {
              if (volatile_refs_p (SET_SRC (set)))
                {
                  rtx new_set;

                  /* If we must not delete the source,
                     load it into a new temporary.  */
                  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

                  new_set = single_set (copy);
                  if (new_set == 0)
                    abort ();

                  SET_DEST (new_set)
                    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
                }
              /* If the source and destination are the same and it
                 has a note on it, keep the insn.  */
              else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
                       && REG_NOTES (insn) != 0)
                copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
              else
                break;
            }

          /* Similarly if an ignored return value is clobbered.  */
          else if (map->inline_target == 0
                   && GET_CODE (pattern) == CLOBBER
                   && GET_CODE (XEXP (pattern, 0)) == REG
                   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            break;

          /* Look for the address of the static chain slot.  The
             rtx_equal_p comparisons against the
             static_chain_incoming_rtx below may fail if the static
             chain is in memory and the address specified is not
             "legitimate".  This happens on Xtensa where the static
             chain is at a negative offset from argp and where only
             positive offsets are legitimate.  When the RTL is
             generated, the address is "legitimized" by copying it
             into a register, causing the rtx_equal_p comparisons to
             fail.  This workaround looks for code that sets a
             register to the address of the static chain.  Subsequent
             memory references via that register can then be
             identified as static chain references.  We assume that
             the register is only assigned once, and that the static
             chain address is only live in one register at a time.  */

          else if (static_chain_value != 0
                   && set != 0
                   && GET_CODE (static_chain_incoming_rtx) == MEM
                   && GET_CODE (SET_DEST (set)) == REG
                   && rtx_equal_p (SET_SRC (set),
                                   XEXP (static_chain_incoming_rtx, 0)))
            {
              static_chain_mem =
                gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
                             SET_DEST (set));

              /* Emit the instruction in case it is used for something
                 other than setting the static chain; if it's not used,
                 it can always be removed as dead code.  */
              copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
            }

          /* If this is setting the static chain rtx, omit it.  */
          else if (static_chain_value != 0
                   && set != 0
                   && (rtx_equal_p (SET_DEST (set),
                                    static_chain_incoming_rtx)
                       || (static_chain_mem
                           && rtx_equal_p (SET_DEST (set), static_chain_mem))))
            break;

          /* If this is setting the static chain pseudo, set it from
             the value we want to give it instead.  */
          else if (static_chain_value != 0
                   && set != 0
                   && (rtx_equal_p (SET_SRC (set),
                                    static_chain_incoming_rtx)
                       || (static_chain_mem
                           && rtx_equal_p (SET_SRC (set), static_chain_mem))))
            {
              rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

              copy = emit_move_insn (newdest, static_chain_value);
              if (GET_CODE (static_chain_incoming_rtx) != MEM)
                static_chain_value = 0;
            }

          /* If this is setting the virtual stack vars register, this must
             be the code at the handler for a builtin longjmp.  The value
             saved in the setjmp buffer will be the address of the frame
             we've made for this inlined instance within our frame.  But we
             know the offset of that value so we can use it to reconstruct
             our virtual stack vars register from that value.  If we are
             copying it from the stack pointer, leave it unchanged.  */
          else if (set != 0
                   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
            {
              HOST_WIDE_INT offset;
              temp = map->reg_map[REGNO (SET_DEST (set))];
              temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                         REGNO (temp)).rtx;

              if (rtx_equal_p (temp, virtual_stack_vars_rtx))
                offset = 0;
              else if (GET_CODE (temp) == PLUS
                       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
                       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
                offset = INTVAL (XEXP (temp, 1));
              else
                abort ();

              if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
                temp = SET_SRC (set);
              else
                temp = force_operand (plus_constant (SET_SRC (set),
                                                     - offset),
                                      NULL_RTX);

              copy = emit_move_insn (virtual_stack_vars_rtx, temp);
            }

          else
            copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
          /* REG_NOTES will be copied later.  */
#ifdef HAVE_cc0
          /* If this insn is setting CC0, it may need to look at
             the insn that uses CC0 to see what type of insn it is.
             In that case, the call to recog via validate_change will
             fail.  So don't substitute constants here.  Instead,
             do it when we emit the following insn.

             For example, see the pyr.md file.  That machine has signed and
             unsigned compares.  The compare patterns must check the
             following branch insn to see what kind of compare to
             emit.

             If the previous insn set CC0, substitute constants on it as
             well.  */
          if (sets_cc0_p (PATTERN (copy)) != 0)
            cc0_insn = copy;
          else
            {
              if (cc0_insn)
                try_constants (cc0_insn, map);
              cc0_insn = 0;
              try_constants (copy, map);
            }
#else
          try_constants (copy, map);
#endif
          INSN_SCOPE (copy) = INSN_SCOPE (insn);
          break;

        case JUMP_INSN:
          if (map->integrating && returnjump_p (insn))
            {
              if (map->local_return_label == 0)
                map->local_return_label = gen_label_rtx ();
              pattern = gen_jump (map->local_return_label);
            }
          else
            pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

          copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);
          INSN_SCOPE (copy) = INSN_SCOPE (insn);

          /* If this used to be a conditional jump insn whose branch
             direction is now known, we must do something special.  */
          if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
            {
#ifdef HAVE_cc0
              /* If the previous insn set cc0 for us, delete it.  */
              if (only_sets_cc0_p (PREV_INSN (copy)))
                delete_related_insns (PREV_INSN (copy));
#endif

              /* If this is now a no-op, delete it.  */
              if (map->last_pc_value == pc_rtx)
                {
                  delete_related_insns (copy);
                  copy = 0;
                }
              else
                /* Otherwise, this is unconditional jump so we must put a
                   BARRIER after it.  We could do some dead code elimination
                   here, but jump.c will do it just as well.  */
                emit_barrier ();
            }
          break;

        case CALL_INSN:
          /* If this is a CALL_PLACEHOLDER insn then we need to copy the
             three attached sequences: normal call, sibling call and tail
             recursion.  */
          if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              rtx sequence[3];
              rtx tail_label;

              for (i = 0; i < 3; i++)
                {
                  rtx seq;

                  sequence[i] = NULL_RTX;
                  seq = XEXP (PATTERN (insn), i);
                  if (seq)
                    {
                      start_sequence ();
                      copy_insn_list (seq, map, static_chain_value);
                      sequence[i] = get_insns ();
                      end_sequence ();
                    }
                }

              /* Find the new tail recursion label.
                 It will already be substituted into sequence[2].  */
              tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
                                                    map, 0);

              copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
                                                               sequence[0],
                                                               sequence[1],
                                                               sequence[2],
                                                               tail_label));
              break;
            }

          pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
          copy = emit_call_insn (pattern);

          SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
          CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
          INSN_SCOPE (copy) = INSN_SCOPE (insn);

          /* Because the USAGE information potentially contains objects other
             than hard registers, we need to copy it.  */

          CALL_INSN_FUNCTION_USAGE (copy)
            = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
                                       map, 0);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
          break;

        case CODE_LABEL:
          copy = emit_label (get_label_from_map (map,
                                                 CODE_LABEL_NUMBER (insn)));
          LABEL_NAME (copy) = LABEL_NAME (insn);
          map->const_age++;
          break;

        case BARRIER:
          copy = emit_barrier ();
          break;
1652 case NOTE:
1653 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1655 copy = emit_label (get_label_from_map (map,
1656 CODE_LABEL_NUMBER (insn)));
1657 LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1658 map->const_age++;
1659 break;
1662 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1663 discarded because it is important to have only one of
1664 each in the current function.
1666 NOTE_INSN_DELETED notes aren't useful. */
1668 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1669 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1670 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1672 copy = emit_note (NOTE_SOURCE_FILE (insn),
1673 NOTE_LINE_NUMBER (insn));
1674 if (copy
1675 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1676 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1677 && NOTE_BLOCK (insn))
1679 tree *mapped_block_p;
1681 mapped_block_p
1682 = (tree *) bsearch (NOTE_BLOCK (insn),
1683 &VARRAY_TREE (map->block_map, 0),
1684 map->block_map->elements_used,
1685 sizeof (tree),
1686 find_block);
1688 if (!mapped_block_p)
1689 abort ();
1690 else
1691 NOTE_BLOCK (copy) = *mapped_block_p;
1693 else if (copy
1694 && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1695 NOTE_EXPECTED_VALUE (copy)
1696 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1697 map, 0);
1699 else
1700 copy = 0;
1701 break;
1703 default:
1704 abort ();
1707 if (copy)
1708 RTX_INTEGRATED_P (copy) = 1;
1710 map->insn_map[INSN_UID (insn)] = copy;
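/* For illustration, a minimal standalone sketch (not GCC code; all names
   invented) of the insn_map idiom used above: copies are keyed by the
   original insn's UID so later passes can redirect references from
   originals to their copies.  A null entry means the insn produced no
   copy (e.g. a folded jump), so callers fall back to the original.  */
#if 0
#include <stdio.h>

#define MAX_UID 16

struct toy_insn { int uid; };

static struct toy_insn *insn_map[MAX_UID];

static struct toy_insn *
remap (struct toy_insn *insn)
{
  struct toy_insn *copy = insn_map[insn->uid];
  return copy ? copy : insn;
}

int
main (void)
{
  struct toy_insn orig = { 3 }, copy = { 7 };

  insn_map[orig.uid] = &copy;
  printf ("uid %d maps to uid %d\n", orig.uid, remap (&orig)->uid);
  return 0;
}
#endif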
1714 /* Copy the REG_NOTES. Increment const_age, so that only constants
1715 from parameters can be substituted in. These are the only ones
1716 that are valid across the entire function. */
1718 static void
1719 copy_insn_notes (insns, map, eh_region_offset)
1720 rtx insns;
1721 struct inline_remap *map;
1722 int eh_region_offset;
1724 rtx insn, new_insn;
1726 map->const_age++;
1727 for (insn = insns; insn; insn = NEXT_INSN (insn))
1729 if (! INSN_P (insn))
1730 continue;
1732 new_insn = map->insn_map[INSN_UID (insn)];
1733 if (! new_insn)
1734 continue;
1736 if (REG_NOTES (insn))
1738 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1740 /* We must also do subst_constants, in case one of our parameters
1741 has const type and constant value. */
1742 subst_constants (&note, NULL_RTX, map, 0);
1743 apply_change_group ();
1744 REG_NOTES (new_insn) = note;
1746 /* Delete any REG_LABEL notes from the chain. Remap any
1747 REG_EH_REGION notes. */
1748 for (; note; note = next)
1750 next = XEXP (note, 1);
1751 if (REG_NOTE_KIND (note) == REG_LABEL)
1752 remove_note (new_insn, note);
1753 else if (REG_NOTE_KIND (note) == REG_EH_REGION
1754 && INTVAL (XEXP (note, 0)) > 0)
1755 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1756 + eh_region_offset);
1760 if (GET_CODE (insn) == CALL_INSN
1761 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1763 int i;
1764 for (i = 0; i < 3; i++)
1765 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1768 if (GET_CODE (insn) == JUMP_INSN
1769 && GET_CODE (PATTERN (insn)) == RESX)
1770 XINT (PATTERN (new_insn), 0) += eh_region_offset;
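/* For illustration, a standalone sketch (not GCC code; toy types) of the
   note-chain pass above: one kind of note is deleted outright, another
   has its positive region number shifted by a fixed offset into the
   caller's numbering.  */
#if 0
#include <stdio.h>

enum note_kind { NOTE_LABEL, NOTE_EH_REGION, NOTE_OTHER };

struct note
{
  enum note_kind kind;
  int value;
  struct note *next;
};

static struct note *
fixup_notes (struct note *list, int eh_offset)
{
  struct note **link = &list;
  struct note *n;

  while ((n = *link) != NULL)
    {
      if (n->kind == NOTE_LABEL)
	*link = n->next;		/* unlink, like remove_note above */
      else
	{
	  if (n->kind == NOTE_EH_REGION && n->value > 0)
	    n->value += eh_offset;	/* renumber into the caller's regions */
	  link = &n->next;
	}
    }
  return list;
}

int
main (void)
{
  struct note eh = { NOTE_EH_REGION, 2, 0 };
  struct note lbl = { NOTE_LABEL, 0, &eh };
  struct note *notes = fixup_notes (&lbl, 10);

  printf ("first note: kind %d, value %d\n", notes->kind, notes->value);
  return 0;
}
#endif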
1774 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1775 push all of those decls and give each one the corresponding home. */
1777 static void
1778 integrate_parm_decls (args, map, arg_vector)
1779 tree args;
1780 struct inline_remap *map;
1781 rtvec arg_vector;
1783 tree tail;
1784 int i;
1786 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1788 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1789 current_function_decl);
1790 rtx new_decl_rtl
1791 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1793 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1794 here, but that's going to require some more work. */
1795 /* DECL_INCOMING_RTL (decl) = ?; */
1796 /* Fully instantiate the address with the equivalent form so that the
1797 debugging information contains the actual register, instead of the
1798 virtual register. Do this by not passing an insn to
1799 subst_constants. */
1800 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1801 apply_change_group ();
1802 SET_DECL_RTL (decl, new_decl_rtl);
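/* For illustration, a standalone sketch (not GCC code; toy types) of the
   lock-step walk above: a chained list of parameters is paired by index
   with a vector of copied homes, as the TREE_CHAIN/RTVEC_ELT loop does.  */
#if 0
#include <stdio.h>

struct toy_decl
{
  const char *name;
  struct toy_decl *chain;
};

static void
pair_homes (struct toy_decl *args, const char *const *homes)
{
  struct toy_decl *tail;
  int i;

  for (tail = args, i = 0; tail; tail = tail->chain, i++)
    printf ("%s gets home %s\n", tail->name, homes[i]);
}

int
main (void)
{
  struct toy_decl b = { "b", 0 };
  struct toy_decl a = { "a", &b };
  const char *const homes[] = { "(reg 59)", "(reg 60)" };

  pair_homes (&a, homes);
  return 0;
}
#endif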
1806 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1807 current function a tree of contexts isomorphic to the one that is given.
1809 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1810 registers used in the DECL_RTL field should be remapped. If it is zero,
1811 no mapping is necessary. */
1813 static tree
1814 integrate_decl_tree (let, map)
1815 tree let;
1816 struct inline_remap *map;
1818 tree t;
1819 tree new_block;
1820 tree *next;
1822 new_block = make_node (BLOCK);
1823 VARRAY_PUSH_TREE (map->block_map, new_block);
1824 next = &BLOCK_VARS (new_block);
1826 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1828 tree d;
1830 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1832 if (DECL_RTL_SET_P (t))
1834 rtx r;
1836 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1838 /* Fully instantiate the address with the equivalent form so that the
1839 debugging information contains the actual register, instead of the
1840 virtual register. Do this by not passing an insn to
1841 subst_constants. */
1842 r = DECL_RTL (d);
1843 subst_constants (&r, NULL_RTX, map, 1);
1844 SET_DECL_RTL (d, r);
1846 apply_change_group ();
1849 /* Add this declaration to the list of variables in the new
1850 block. */
1851 *next = d;
1852 next = &TREE_CHAIN (d);
1855 next = &BLOCK_SUBBLOCKS (new_block);
1856 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1858 *next = integrate_decl_tree (t, map);
1859 BLOCK_SUPERCONTEXT (*next) = new_block;
1860 next = &BLOCK_CHAIN (*next);
1863 TREE_USED (new_block) = TREE_USED (let);
1864 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1866 return new_block;
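/* For illustration, a standalone sketch (not GCC code; toy types) of the
   pointer-to-pointer cursor used above ("next = &BLOCK_VARS (...)"):
   each copy is appended through *next, so the first element needs no
   special case and the chain stays in source order.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct node
{
  int v;
  struct node *chain;
};

static struct node *
copy_chain (const struct node *src)
{
  struct node *head = NULL;
  struct node **next = &head;

  for (; src; src = src->chain)
    {
      struct node *d = (struct node *) malloc (sizeof *d);

      d->v = src->v;
      d->chain = NULL;
      *next = d;		/* append, like "*next = d" above */
      next = &d->chain;		/* advance, like "next = &TREE_CHAIN (d)" */
    }
  return head;
}

int
main (void)
{
  struct node c = { 3, 0 }, b = { 2, &c }, a = { 1, &b };
  struct node *p;

  for (p = copy_chain (&a); p; p = p->chain)
    printf ("%d\n", p->v);
  return 0;
}
#endif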
1869 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1870 except for those few rtx codes that are sharable.
1872 We always return an rtx that is similar to the incoming rtx, with the
1873 exception of possibly changing a REG to a SUBREG or vice versa. No
1874 rtl is ever emitted.
1876 If FOR_LHS is nonzero, it means we are processing something that will
1877 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1878 inlining since we need to be conservative in how it is set for
1879 such cases.
1881 Handle constants that need to be placed in the constant pool by
1882 calling `force_const_mem'. */
1885 copy_rtx_and_substitute (orig, map, for_lhs)
1886 rtx orig;
1887 struct inline_remap *map;
1888 int for_lhs;
1890 rtx copy, temp;
1891 int i, j;
1892 RTX_CODE code;
1893 enum machine_mode mode;
1894 const char *format_ptr;
1895 int regno;
1897 if (orig == 0)
1898 return 0;
1900 code = GET_CODE (orig);
1901 mode = GET_MODE (orig);
1903 switch (code)
1905 case REG:
1906 /* If the stack pointer register shows up, it must be part of
1907 stack-adjustments (*not* because we eliminated the frame pointer!).
1908 Small hard registers are returned as-is. Pseudo-registers
1909 go through their `reg_map'. */
1910 regno = REGNO (orig);
1911 if (regno <= LAST_VIRTUAL_REGISTER
1912 || (map->integrating
1913 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1915 /* Some hard registers are also mapped,
1916 but others are not translated. */
1917 if (map->reg_map[regno] != 0)
1918 return map->reg_map[regno];
1920 /* If this is the virtual frame pointer, make space in current
1921 function's stack frame for the stack frame of the inline function.
1923 Copy the address of this area into a pseudo. Map
1924 virtual_stack_vars_rtx to this pseudo and set up a constant
1925 equivalence for it to be the address. This will substitute the
1926 address into insns where it can be substituted and use the new
1927 pseudo where it can't. */
1928 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1930 rtx loc, seq;
1931 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1932 #ifdef FRAME_GROWS_DOWNWARD
1933 int alignment
1934 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1935 / BITS_PER_UNIT);
1937 /* In this case, virtual_stack_vars_rtx points to one byte
1938 higher than the top of the frame area. So make sure we
1939 allocate a big enough chunk to keep the frame pointer
1940 aligned like a real one. */
1941 if (alignment)
1942 size = CEIL_ROUND (size, alignment);
1943 #endif
1944 start_sequence ();
1945 loc = assign_stack_temp (BLKmode, size, 1);
1946 loc = XEXP (loc, 0);
1947 #ifdef FRAME_GROWS_DOWNWARD
1948 /* In this case, virtual_stack_vars_rtx points to one byte
1949 higher than the top of the frame area. So compute the offset
1950 to one byte higher than our substitute frame. */
1951 loc = plus_constant (loc, size);
1952 #endif
1953 map->reg_map[regno] = temp
1954 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1956 #ifdef STACK_BOUNDARY
1957 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1958 #endif
1960 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1962 seq = get_insns ();
1963 end_sequence ();
1964 emit_insn_after (seq, map->insns_at_start);
1965 return temp;
1967 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1968 || (map->integrating
1969 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1970 == orig)))
1972 /* Do the same for a block to contain any arguments referenced
1973 in memory. */
1974 rtx loc, seq;
1975 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1977 start_sequence ();
1978 loc = assign_stack_temp (BLKmode, size, 1);
1979 loc = XEXP (loc, 0);
1980 /* When arguments grow downward, the virtual incoming
1981 args pointer points to the top of the argument block,
1982 so the remapped location better do the same. */
1983 #ifdef ARGS_GROW_DOWNWARD
1984 loc = plus_constant (loc, size);
1985 #endif
1986 map->reg_map[regno] = temp
1987 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1989 #ifdef STACK_BOUNDARY
1990 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1991 #endif
1993 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1995 seq = get_insns ();
1996 end_sequence ();
1997 emit_insn_after (seq, map->insns_at_start);
1998 return temp;
2000 else if (REG_FUNCTION_VALUE_P (orig))
2002 /* This is a reference to the function return value. If
2003 the function doesn't have a return value, error. If the
2004 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
2005 if (map->inline_target == 0)
2007 if (rtx_equal_function_value_matters)
2008 /* This is an ignored return value. We must not
2009 leave it in with REG_FUNCTION_VALUE_P set, since
2010 that would confuse subsequent inlining of the
2011 current function into a later function. */
2012 return gen_rtx_REG (GET_MODE (orig), regno);
2013 else
2014 /* Must be unrolling loops or replicating code if we
2015 reach here, so return the register unchanged. */
2016 return orig;
2018 else if (GET_MODE (map->inline_target) != BLKmode
2019 && mode != GET_MODE (map->inline_target))
2020 return gen_lowpart (mode, map->inline_target);
2021 else
2022 return map->inline_target;
2024 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2025 /* If leaf_renumber_regs_insn() might remap this register to
2026 some other number, make sure we don't share it with the
2027 inlined function, otherwise delayed optimization of the
2028 inlined function may change it in place, breaking our
2029 reference to it. We may still share it within the
2030 function, so create an entry for this register in the
2031 reg_map. */
2032 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2033 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2035 if (!map->leaf_reg_map[regno][mode])
2036 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2037 return map->leaf_reg_map[regno][mode];
2039 #endif
2040 else
2041 return orig;
2043 abort ();
2045 if (map->reg_map[regno] == NULL)
2047 map->reg_map[regno] = gen_reg_rtx (mode);
2048 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2049 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2050 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2051 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2053 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2054 mark_reg_pointer (map->reg_map[regno],
2055 map->regno_pointer_align[regno]);
2057 return map->reg_map[regno];
2059 case SUBREG:
2060 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2061 return simplify_gen_subreg (GET_MODE (orig), copy,
2062 GET_MODE (SUBREG_REG (orig)),
2063 SUBREG_BYTE (orig));
2065 case ADDRESSOF:
2066 copy = gen_rtx_ADDRESSOF (mode,
2067 copy_rtx_and_substitute (XEXP (orig, 0),
2068 map, for_lhs),
2069 0, ADDRESSOF_DECL (orig));
2070 regno = ADDRESSOF_REGNO (orig);
2071 if (map->reg_map[regno])
2072 regno = REGNO (map->reg_map[regno]);
2073 else if (regno > LAST_VIRTUAL_REGISTER)
2075 temp = XEXP (orig, 0);
2076 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2077 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2078 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2079 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2080 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2082 /* Objects may initially be represented as registers, but
2083 later be turned into a MEM if their address is taken by
2084 put_var_into_stack. Therefore, the register table may have
2085 entries which are MEMs.
2087 We briefly tried to clear such entries, but that ended up
2088 cascading into many changes due to the optimizers not being
2089 prepared for empty entries in the register table. So we've
2090 decided to allow the MEMs in the register table for now. */
2091 if (REG_P (map->x_regno_reg_rtx[regno])
2092 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2093 mark_reg_pointer (map->reg_map[regno],
2094 map->regno_pointer_align[regno]);
2095 regno = REGNO (map->reg_map[regno]);
2097 ADDRESSOF_REGNO (copy) = regno;
2098 return copy;
2100 case USE:
2101 case CLOBBER:
2102 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2103 to (use foo) if the original insn didn't have a subreg.
2104 Removing the subreg distorts the VAX movstrhi pattern
2105 by changing the mode of an operand. */
2106 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2107 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2108 copy = SUBREG_REG (copy);
2109 return gen_rtx_fmt_e (code, VOIDmode, copy);
2111 /* We need to handle "deleted" labels that appear in the DECL_RTL
2112 of a LABEL_DECL. */
2113 case NOTE:
2114 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2115 break;
2117 /* ... FALLTHRU ... */
2118 case CODE_LABEL:
2119 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2120 = LABEL_PRESERVE_P (orig);
2121 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2123 case LABEL_REF:
2124 copy
2125 = gen_rtx_LABEL_REF
2126 (mode,
2127 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2128 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2130 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2132 /* The fact that this label was previously nonlocal does not mean
2133 it still is, so we must check if it is within the range of
2134 this function's labels. */
2135 LABEL_REF_NONLOCAL_P (copy)
2136 = (LABEL_REF_NONLOCAL_P (orig)
2137 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2138 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2140 /* If we have made a nonlocal label local, it means that this
2141 inlined call will be referring to our nonlocal goto handler.
2142 So make sure we create one for this block; we normally would
2143 not since this is not otherwise considered a "call". */
2144 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2145 function_call_count++;
2147 return copy;
2149 case PC:
2150 case CC0:
2151 case CONST_INT:
2152 case CONST_VECTOR:
2153 return orig;
2155 case SYMBOL_REF:
2156 /* Symbols which represent the address of a label stored in the constant
2157 pool must be modified to point to a constant pool entry for the
2158 remapped label. Otherwise, symbols are returned unchanged. */
2159 if (CONSTANT_POOL_ADDRESS_P (orig))
2161 struct function *f = inlining ? inlining : cfun;
2162 rtx constant = get_pool_constant_for_function (f, orig);
2163 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2164 if (inlining)
2166 rtx temp = force_const_mem (const_mode,
2167 copy_rtx_and_substitute (constant,
2168 map, 0));
2170 #if 0
2171 /* Legitimizing the address here is incorrect.
2173 Since we had a SYMBOL_REF before, we can assume it is valid
2174 to have one in this position in the insn.
2176 Also, change_address may create new registers. These
2177 registers will not have valid reg_map entries. This can
2178 cause try_constants() to fail because it assumes that all
2179 registers in the rtx have valid reg_map entries, and it may
2180 end up replacing one of these new registers with junk. */
2182 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2183 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2184 #endif
2186 temp = XEXP (temp, 0);
2188 #ifdef POINTERS_EXTEND_UNSIGNED
2189 if (GET_MODE (temp) != GET_MODE (orig))
2190 temp = convert_memory_address (GET_MODE (orig), temp);
2191 #endif
2192 return temp;
2194 else if (GET_CODE (constant) == LABEL_REF)
2195 return XEXP (force_const_mem
2196 (GET_MODE (orig),
2197 copy_rtx_and_substitute (constant, map, for_lhs)),
2198 0);
2201 return orig;
2203 case CONST_DOUBLE:
2204 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2205 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2206 duplicate of a CONST_DOUBLE we have already seen. */
2207 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2209 REAL_VALUE_TYPE d;
2211 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2212 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2214 else
2215 return immed_double_const (CONST_DOUBLE_LOW (orig),
2216 CONST_DOUBLE_HIGH (orig), VOIDmode);
2218 case CONST:
2219 /* Make new constant pool entry for a constant
2220 that was in the pool of the inline function. */
2221 if (RTX_INTEGRATED_P (orig))
2222 abort ();
2223 break;
2225 case ASM_OPERANDS:
2226 /* If a single asm insn contains multiple output operands then
2227 it contains multiple ASM_OPERANDS rtx's that share the input
2228 and constraint vecs. We must make sure that the copied insn
2229 continues to share it. */
2230 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2232 copy = rtx_alloc (ASM_OPERANDS);
2233 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2234 PUT_MODE (copy, GET_MODE (orig));
2235 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2236 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2237 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2238 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2239 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2240 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2241 = map->copy_asm_constraints_vector;
2242 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2243 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2244 return copy;
2246 break;
2248 case CALL:
2249 /* This is given special treatment because the first
2250 operand of a CALL is a (MEM ...) which may get
2251 forced into a register for cse. This is undesirable
2252 if function-address cse isn't wanted or if we won't do cse. */
2253 #ifndef NO_FUNCTION_CSE
2254 if (! (optimize && ! flag_no_function_cse))
2255 #endif
2257 rtx copy
2258 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2259 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2260 map, 0));
2262 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2264 return
2265 gen_rtx_CALL (GET_MODE (orig), copy,
2266 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2268 break;
2270 #if 0
2271 /* Must be ifdefed out for loop unrolling to work. */
2272 case RETURN:
2273 abort ();
2274 #endif
2276 case SET:
2277 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2278 Adjust the setting by the offset of the area we made.
2279 If the nonlocal goto is into the current function,
2280 this will result in unnecessarily bad code, but should work. */
2281 if (SET_DEST (orig) == virtual_stack_vars_rtx
2282 || SET_DEST (orig) == virtual_incoming_args_rtx)
2284 /* In case a translation hasn't occurred already, make one now. */
2285 rtx equiv_reg;
2286 rtx equiv_loc;
2287 HOST_WIDE_INT loc_offset;
2289 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2290 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2291 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2292 REGNO (equiv_reg)).rtx;
2293 loc_offset
2294 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2296 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2297 force_operand
2298 (plus_constant
2299 (copy_rtx_and_substitute (SET_SRC (orig),
2300 map, 0),
2301 - loc_offset),
2302 NULL_RTX));
2304 else
2305 return gen_rtx_SET (VOIDmode,
2306 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2307 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2308 break;
2310 case MEM:
2311 if (inlining
2312 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2313 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2315 enum machine_mode const_mode
2316 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2317 rtx constant
2318 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2320 constant = copy_rtx_and_substitute (constant, map, 0);
2322 /* If this was an address of a constant pool entry that itself
2323 had to be placed in the constant pool, it might not be a
2324 valid address. So the recursive call might have turned it
2325 into a register. In that case, it isn't a constant any
2326 more, so return it. This has the potential of changing a
2327 MEM into a REG, but we'll assume that it is safe. */
2328 if (! CONSTANT_P (constant))
2329 return constant;
2331 return validize_mem (force_const_mem (const_mode, constant));
2334 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2335 map, 0));
2336 MEM_COPY_ATTRIBUTES (copy, orig);
2338 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2339 since this may be an indirect reference to a parameter and the
2340 actual may not be readonly. */
2341 if (inlining && !for_lhs)
2342 RTX_UNCHANGING_P (copy) = 0;
2344 /* If inlining, squish aliasing data that references the subroutine's
2345 parameter list, since that's no longer applicable. */
2346 if (inlining && MEM_EXPR (copy)
2347 && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2348 && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2349 set_mem_expr (copy, NULL_TREE);
2351 return copy;
2353 default:
2354 break;
2357 copy = rtx_alloc (code);
2358 PUT_MODE (copy, mode);
2359 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2360 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2361 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2363 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2365 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2367 switch (*format_ptr++)
2369 case '0':
2370 /* Copy this through the wide int field; that's safest. */
2371 X0WINT (copy, i) = X0WINT (orig, i);
2372 break;
2374 case 'e':
2375 XEXP (copy, i)
2376 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2377 break;
2379 case 'u':
2380 /* Change any references to old-insns to point to the
2381 corresponding copied insns. */
2382 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2383 break;
2385 case 'E':
2386 XVEC (copy, i) = XVEC (orig, i);
2387 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2389 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2390 for (j = 0; j < XVECLEN (copy, i); j++)
2391 XVECEXP (copy, i, j)
2392 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2393 map, for_lhs);
2395 break;
2397 case 'w':
2398 XWINT (copy, i) = XWINT (orig, i);
2399 break;
2401 case 'i':
2402 XINT (copy, i) = XINT (orig, i);
2403 break;
2405 case 's':
2406 XSTR (copy, i) = XSTR (orig, i);
2407 break;
2409 case 't':
2410 XTREE (copy, i) = XTREE (orig, i);
2411 break;
2413 default:
2414 abort ();
2418 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2420 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2421 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2422 map->copy_asm_constraints_vector
2423 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2426 return copy;
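/* For illustration, a standalone sketch (not GCC code; toy expression
   type) of the format-driven copy loop above: each node code carries a
   format string ('i' = integer operand, 'e' = subexpression), and one
   generic loop dispatches on the format character, as the
   GET_RTX_FORMAT loop does for rtl.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

enum toy_code { T_INT, T_PLUS };

static const char *const toy_format[] = { "i", "ee" };

struct toy_expr
{
  enum toy_code code;
  int i;			/* valid for 'i' operands */
  struct toy_expr *op[2];	/* valid for 'e' operands */
};

static struct toy_expr *
copy_expr (const struct toy_expr *orig)
{
  struct toy_expr *copy = (struct toy_expr *) malloc (sizeof *copy);
  const char *fmt = toy_format[orig->code];
  int i;

  copy->code = orig->code;
  for (i = 0; fmt[i]; i++)
    switch (fmt[i])
      {
      case 'i':
	copy->i = orig->i;
	break;
      case 'e':
	copy->op[i] = copy_expr (orig->op[i]);
	break;
      default:
	abort ();
      }
  return copy;
}

int
main (void)
{
  struct toy_expr a = { T_INT, 1, { 0, 0 } };
  struct toy_expr b = { T_INT, 2, { 0, 0 } };
  struct toy_expr sum = { T_PLUS, 0, { &a, &b } };
  struct toy_expr *c = copy_expr (&sum);

  printf ("%d + %d\n", c->op[0]->i, c->op[1]->i);
  return 0;
}
#endif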
2429 /* Substitute known constant values into INSN, if that is valid. */
2431 void
2432 try_constants (insn, map)
2433 rtx insn;
2434 struct inline_remap *map;
2436 int i;
2438 map->num_sets = 0;
2440 /* First try just updating addresses, then other things. This is
2441 important when we have something like the store of a constant
2442 into memory and we can update the memory address but the machine
2443 does not support a constant source. */
2444 subst_constants (&PATTERN (insn), insn, map, 1);
2445 apply_change_group ();
2446 subst_constants (&PATTERN (insn), insn, map, 0);
2447 apply_change_group ();
2449 /* Show we don't know the value of anything stored or clobbered. */
2450 note_stores (PATTERN (insn), mark_stores, NULL);
2451 map->last_pc_value = 0;
2452 #ifdef HAVE_cc0
2453 map->last_cc0_value = 0;
2454 #endif
2456 /* Set up any constant equivalences made in this insn. */
2457 for (i = 0; i < map->num_sets; i++)
2459 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2461 int regno = REGNO (map->equiv_sets[i].dest);
2463 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2464 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2465 /* Following clause is a hack to make the case work where GNU C++
2466 reassigns a variable to make cse work right. */
2467 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2468 regno).rtx,
2469 map->equiv_sets[i].equiv))
2470 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2471 map->equiv_sets[i].equiv, map->const_age);
2473 else if (map->equiv_sets[i].dest == pc_rtx)
2474 map->last_pc_value = map->equiv_sets[i].equiv;
2475 #ifdef HAVE_cc0
2476 else if (map->equiv_sets[i].dest == cc0_rtx)
2477 map->last_cc0_value = map->equiv_sets[i].equiv;
2478 #endif
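/* For illustration, a standalone sketch (not GCC code; invented names)
   of the validate_change/apply_change_group protocol relied on above:
   edits are applied tentatively with their old values saved, then the
   whole group is either kept or rolled back.  */
#if 0
#include <stdio.h>

#define MAX_CHANGES 8

struct change { int *loc; int old; };

static struct change pending[MAX_CHANGES];
static int num_changes;

static void
propose_change (int *loc, int new_val)
{
  pending[num_changes].loc = loc;
  pending[num_changes].old = *loc;
  num_changes++;
  *loc = new_val;		/* applied tentatively */
}

static void
commit_or_cancel (int group_is_valid)
{
  if (group_is_valid)
    num_changes = 0;		/* keep every change */
  else
    while (num_changes > 0)
      {
	num_changes--;
	*pending[num_changes].loc = pending[num_changes].old;
      }
}

int
main (void)
{
  int x = 1;

  propose_change (&x, 42);
  commit_or_cancel (0);		/* invalid group: x reverts to 1 */
  printf ("x = %d\n", x);
  return 0;
}
#endif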
2482 /* Substitute known constants for pseudo regs in the contents of LOC,
2483 which are part of INSN.
2484 If INSN is zero, the substitution should always be done (this is used to
2485 update DECL_RTL).
2486 These changes are taken out by try_constants if the result is not valid.
2488 Note that we are more concerned with determining when the result of a SET
2489 is a constant, for further propagation, than actually inserting constants
2490 into insns; cse will do the latter task better.
2492 This function is also used to adjust address of items previously addressed
2493 via the virtual stack variable or virtual incoming arguments registers.
2495 If MEMONLY is nonzero, only make changes inside a MEM. */
2497 static void
2498 subst_constants (loc, insn, map, memonly)
2499 rtx *loc;
2500 rtx insn;
2501 struct inline_remap *map;
2502 int memonly;
2504 rtx x = *loc;
2505 int i, j;
2506 enum rtx_code code;
2507 const char *format_ptr;
2508 int num_changes = num_validated_changes ();
2509 rtx new = 0;
2510 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2512 code = GET_CODE (x);
2514 switch (code)
2516 case PC:
2517 case CONST_INT:
2518 case CONST_DOUBLE:
2519 case CONST_VECTOR:
2520 case SYMBOL_REF:
2521 case CONST:
2522 case LABEL_REF:
2523 case ADDRESS:
2524 return;
2526 #ifdef HAVE_cc0
2527 case CC0:
2528 if (! memonly)
2529 validate_change (insn, loc, map->last_cc0_value, 1);
2530 return;
2531 #endif
2533 case USE:
2534 case CLOBBER:
2535 /* The only thing we can do with a USE or CLOBBER is possibly do
2536 some substitutions in a MEM within it. */
2537 if (GET_CODE (XEXP (x, 0)) == MEM)
2538 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2539 return;
2541 case REG:
2542 /* Substitute for parms and known constants. Don't replace
2543 hard regs used as user variables with constants. */
2544 if (! memonly)
2546 int regno = REGNO (x);
2547 struct const_equiv_data *p;
2549 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2550 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2551 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2552 p->rtx != 0)
2553 && p->age >= map->const_age)
2554 validate_change (insn, loc, p->rtx, 1);
2556 return;
2558 case SUBREG:
2559 /* SUBREG applied to something other than a reg
2560 should be treated as ordinary, since that must
2561 be a special hack and we don't know how to treat it specially.
2562 Consider for example mulsidi3 in m68k.md.
2563 Ordinary SUBREG of a REG needs this special treatment. */
2564 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2566 rtx inner = SUBREG_REG (x);
2567 rtx new = 0;
2569 /* We can't call subst_constants on &SUBREG_REG (x) because any
2570 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2571 see what is inside, try to form the new SUBREG and see if that is
2572 valid. We handle two cases: extracting a full word in an
2573 integral mode and extracting the low part. */
2574 subst_constants (&inner, NULL_RTX, map, 0);
2575 new = simplify_gen_subreg (GET_MODE (x), inner,
2576 GET_MODE (SUBREG_REG (x)),
2577 SUBREG_BYTE (x));
2579 if (new)
2580 validate_change (insn, loc, new, 1);
2581 else
2582 cancel_changes (num_changes);
2584 return;
2586 break;
2588 case MEM:
2589 subst_constants (&XEXP (x, 0), insn, map, 0);
2591 /* If a memory address got spoiled, change it back. */
2592 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2593 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2594 cancel_changes (num_changes);
2595 return;
2597 case SET:
2599 /* Substitute constants in our source, and in any arguments to a
2600 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2601 itself. */
2602 rtx *dest_loc = &SET_DEST (x);
2603 rtx dest = *dest_loc;
2604 rtx src, tem;
2605 enum machine_mode compare_mode = VOIDmode;
2607 /* If SET_SRC is a COMPARE which subst_constants would turn into
2608 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2609 is to be done. */
2610 if (GET_CODE (SET_SRC (x)) == COMPARE)
2612 src = SET_SRC (x);
2613 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2614 #ifdef HAVE_cc0
2615 || dest == cc0_rtx
2616 #endif
2619 compare_mode = GET_MODE (XEXP (src, 0));
2620 if (compare_mode == VOIDmode)
2621 compare_mode = GET_MODE (XEXP (src, 1));
2625 subst_constants (&SET_SRC (x), insn, map, memonly);
2626 src = SET_SRC (x);
2628 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2629 || GET_CODE (*dest_loc) == SUBREG
2630 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2632 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2634 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2635 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2637 dest_loc = &XEXP (*dest_loc, 0);
2640 /* Do substitution in the address of a destination in memory. */
2641 if (GET_CODE (*dest_loc) == MEM)
2642 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2644 /* Check for the case of DEST a SUBREG, both it and the underlying
2645 register are no wider than one word, and the SUBREG has the wider mode.
2646 In that case, we are really setting the underlying register to the
2647 source converted to the mode of DEST. So indicate that. */
2648 if (GET_CODE (dest) == SUBREG
2649 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2650 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2651 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2652 <= GET_MODE_SIZE (GET_MODE (dest)))
2653 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2654 src)))
2655 src = tem, dest = SUBREG_REG (dest);
2657 /* If storing a recognizable value save it for later recording. */
2658 if ((map->num_sets < MAX_RECOG_OPERANDS)
2659 && (CONSTANT_P (src)
2660 || (GET_CODE (src) == REG
2661 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2662 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2663 || (GET_CODE (src) == PLUS
2664 && GET_CODE (XEXP (src, 0)) == REG
2665 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2666 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2667 && CONSTANT_P (XEXP (src, 1)))
2668 || GET_CODE (src) == COMPARE
2669 #ifdef HAVE_cc0
2670 || dest == cc0_rtx
2671 #endif
2672 || (dest == pc_rtx
2673 && (src == pc_rtx || GET_CODE (src) == RETURN
2674 || GET_CODE (src) == LABEL_REF))))
2676 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2677 it will cause us to save the COMPARE with any constants
2678 substituted, which is what we want for later. */
2679 rtx src_copy = copy_rtx (src);
2680 map->equiv_sets[map->num_sets].equiv = src_copy;
2681 map->equiv_sets[map->num_sets++].dest = dest;
2682 if (compare_mode != VOIDmode
2683 && GET_CODE (src) == COMPARE
2684 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2685 #ifdef HAVE_cc0
2686 || dest == cc0_rtx
2687 #endif
2689 && GET_MODE (XEXP (src, 0)) == VOIDmode
2690 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2692 map->compare_src = src_copy;
2693 map->compare_mode = compare_mode;
2697 return;
2699 default:
2700 break;
2703 format_ptr = GET_RTX_FORMAT (code);
2705 /* If the first operand is an expression, save its mode for later. */
2706 if (*format_ptr == 'e')
2707 op0_mode = GET_MODE (XEXP (x, 0));
2709 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2711 switch (*format_ptr++)
2713 case '0':
2714 break;
2716 case 'e':
2717 if (XEXP (x, i))
2718 subst_constants (&XEXP (x, i), insn, map, memonly);
2719 break;
2721 case 'u':
2722 case 'i':
2723 case 's':
2724 case 'w':
2725 case 'n':
2726 case 't':
2727 case 'B':
2728 break;
2730 case 'E':
2731 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2732 for (j = 0; j < XVECLEN (x, i); j++)
2733 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2735 break;
2737 default:
2738 abort ();
2742 /* If this is a commutative operation, move a constant to the second
2743 operand unless the second operand is already a CONST_INT. */
2744 if (! memonly
2745 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2746 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2748 rtx tem = XEXP (x, 0);
2749 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2750 validate_change (insn, &XEXP (x, 1), tem, 1);
2753 /* Simplify the expression in case we put in some constants. */
2754 if (! memonly)
2755 switch (GET_RTX_CLASS (code))
2757 case '1':
2758 if (op0_mode == MAX_MACHINE_MODE)
2759 abort ();
2760 new = simplify_unary_operation (code, GET_MODE (x),
2761 XEXP (x, 0), op0_mode);
2762 break;
2764 case '<':
2766 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2768 if (op_mode == VOIDmode)
2769 op_mode = GET_MODE (XEXP (x, 1));
2770 new = simplify_relational_operation (code, op_mode,
2771 XEXP (x, 0), XEXP (x, 1));
2772 #ifdef FLOAT_STORE_FLAG_VALUE
2773 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2775 enum machine_mode mode = GET_MODE (x);
2776 if (new == const0_rtx)
2777 new = CONST0_RTX (mode);
2778 else
2780 REAL_VALUE_TYPE val;
2782 /* Avoid automatic aggregate initialization. */
2783 val = FLOAT_STORE_FLAG_VALUE (mode);
2784 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2787 #endif
2788 break;
2791 case '2':
2792 case 'c':
2793 new = simplify_binary_operation (code, GET_MODE (x),
2794 XEXP (x, 0), XEXP (x, 1));
2795 break;
2797 case 'b':
2798 case '3':
2799 if (op0_mode == MAX_MACHINE_MODE)
2800 abort ();
2802 if (code == IF_THEN_ELSE)
2804 rtx op0 = XEXP (x, 0);
2806 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2807 && GET_MODE (op0) == VOIDmode
2808 && ! side_effects_p (op0)
2809 && XEXP (op0, 0) == map->compare_src
2810 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2812 /* We have a compare of two VOIDmode constants for which
2813 we recorded the comparison mode. */
2814 rtx temp =
2815 simplify_relational_operation (GET_CODE (op0),
2816 map->compare_mode,
2817 XEXP (op0, 0),
2818 XEXP (op0, 1));
2820 if (temp == const0_rtx)
2821 new = XEXP (x, 2);
2822 else if (temp == const1_rtx)
2823 new = XEXP (x, 1);
2826 if (!new)
2827 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2828 XEXP (x, 0), XEXP (x, 1),
2829 XEXP (x, 2));
2830 break;
2833 if (new)
2834 validate_change (insn, loc, new, 1);
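/* For illustration, a standalone sketch (not GCC code; toy operands) of
   the canonicalization and folding done above: a lone constant is
   swapped into the second operand of a commutative operation, and when
   both operands are constant the operation folds outright.  */
#if 0
#include <stdio.h>

struct operand { int is_const; int value; };

static int
canonicalize_and_fold (struct operand *op0, struct operand *op1, int *value)
{
  if (op0->is_const && !op1->is_const)
    {
      struct operand tmp = *op0;	/* constant goes second, as above */
      *op0 = *op1;
      *op1 = tmp;
    }

  if (op0->is_const && op1->is_const)
    {
      *value = op0->value + op1->value;	/* stands in for the simplify_* calls */
      return 1;
    }
  return 0;
}

int
main (void)
{
  struct operand a = { 1, 4 };		/* constant 4 */
  struct operand b = { 0, 0 };		/* a register */
  int value;

  if (!canonicalize_and_fold (&a, &b, &value))
    printf ("canonical form: reg + %d\n", b.value);
  return 0;
}
#endif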
2837 /* Show that registers modified no longer contain known constants. We are
2838 called from note_stores with parts of the new insn. */
2840 static void
2841 mark_stores (dest, x, data)
2842 rtx dest;
2843 rtx x ATTRIBUTE_UNUSED;
2844 void *data ATTRIBUTE_UNUSED;
2846 int regno = -1;
2847 enum machine_mode mode = VOIDmode;
2849 /* DEST is always the innermost thing set, except in the case of
2850 SUBREGs of hard registers. */
2852 if (GET_CODE (dest) == REG)
2853 regno = REGNO (dest), mode = GET_MODE (dest);
2854 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2856 regno = REGNO (SUBREG_REG (dest));
2857 if (regno < FIRST_PSEUDO_REGISTER)
2858 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2859 GET_MODE (SUBREG_REG (dest)),
2860 SUBREG_BYTE (dest),
2861 GET_MODE (dest));
2862 mode = GET_MODE (SUBREG_REG (dest));
2865 if (regno >= 0)
2867 unsigned int uregno = regno;
2868 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2869 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2870 unsigned int i;
2872 /* Ignore virtual stack var or virtual arg register since those
2873 are handled separately. */
2874 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2875 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2876 for (i = uregno; i <= last_reg; i++)
2877 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2878 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
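/* For illustration, a standalone sketch (not GCC code; invented sizes)
   of the invalidation above: a store to a multi-word hard register
   clobbers every register it covers, so each covered regno loses its
   recorded equivalence; a pseudo covers only itself.  */
#if 0
#define TOY_NREGS 32
#define TOY_FIRST_PSEUDO 16

static int toy_equiv[TOY_NREGS];	/* 0 means "no known constant" */

static void
toy_invalidate (int regno, int words)
{
  int last = regno >= TOY_FIRST_PSEUDO ? regno : regno + words - 1;
  int i;

  for (i = regno; i <= last; i++)
    if (i < TOY_NREGS)
      toy_equiv[i] = 0;
}

int
main (void)
{
  toy_equiv[2] = 99;
  toy_equiv[3] = 99;
  toy_invalidate (2, 2);	/* a two-word store starting at reg 2 */
  return toy_equiv[2] == 0 && toy_equiv[3] == 0 ? 0 : 1;
}
#endif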
2882 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2883 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2884 that it points to the node itself, thus indicating that the node is its
2885 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2886 the given node is NULL, recursively descend the decl/block tree which
2887 it is the root of, and for each other ..._DECL or BLOCK node contained
2888 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2889 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2890 values to point to themselves. */
2892 static void
2893 set_block_origin_self (stmt)
2894 tree stmt;
2896 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2898 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2901 tree local_decl;
2903 for (local_decl = BLOCK_VARS (stmt);
2904 local_decl != NULL_TREE;
2905 local_decl = TREE_CHAIN (local_decl))
2906 set_decl_origin_self (local_decl); /* Potential recursion. */
2910 tree subblock;
2912 for (subblock = BLOCK_SUBBLOCKS (stmt);
2913 subblock != NULL_TREE;
2914 subblock = BLOCK_CHAIN (subblock))
2915 set_block_origin_self (subblock); /* Recurse. */
2920 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2921 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2922 node so that it points to the node itself, thus indicating that the
2923 node represents its own (abstract) origin. Additionally, if the
2924 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2925 the decl/block tree of which the given node is the root, and for
2926 each other ..._DECL or BLOCK node contained therein whose
2927 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2928 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2929 point to themselves. */
2931 void
2932 set_decl_origin_self (decl)
2933 tree decl;
2935 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2937 DECL_ABSTRACT_ORIGIN (decl) = decl;
2938 if (TREE_CODE (decl) == FUNCTION_DECL)
2940 tree arg;
2942 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2943 DECL_ABSTRACT_ORIGIN (arg) = arg;
2944 if (DECL_INITIAL (decl) != NULL_TREE
2945 && DECL_INITIAL (decl) != error_mark_node)
2946 set_block_origin_self (DECL_INITIAL (decl));
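/* For illustration, a standalone sketch (not GCC code; toy tree) of the
   mutual recursion above: blocks visit their decls, decls with bodies
   visit their blocks, and an already-set origin stops the walk, exactly
   the shape of set_block_origin_self and set_decl_origin_self.  */
#if 0
#include <stddef.h>

struct toy_decl;

struct toy_block
{
  struct toy_block *origin;
  struct toy_block *subblocks, *chain;
  struct toy_decl *vars;
};

struct toy_decl
{
  struct toy_decl *origin;
  struct toy_decl *chain;
  struct toy_block *body;	/* non-null for function-like decls */
};

static void toy_decl_origin_self (struct toy_decl *);

static void
toy_block_origin_self (struct toy_block *b)
{
  struct toy_decl *d;
  struct toy_block *sub;

  if (b->origin != NULL)
    return;
  b->origin = b;
  for (d = b->vars; d; d = d->chain)
    toy_decl_origin_self (d);		/* potential mutual recursion */
  for (sub = b->subblocks; sub; sub = sub->chain)
    toy_block_origin_self (sub);
}

static void
toy_decl_origin_self (struct toy_decl *d)
{
  if (d->origin != NULL)
    return;
  d->origin = d;
  if (d->body)
    toy_block_origin_self (d->body);
}

int
main (void)
{
  struct toy_decl v = { NULL, NULL, NULL };
  struct toy_block blk = { NULL, NULL, NULL, &v };

  toy_block_origin_self (&blk);
  return blk.origin == &blk && v.origin == &v ? 0 : 1;
}
#endif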
2951 /* Given a pointer to some BLOCK node, and a boolean value to set the
2952 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2953 the given block, and for all local decls and all local sub-blocks
2954 (recursively) which are contained therein. */
2956 static void
2957 set_block_abstract_flags (stmt, setting)
2958 tree stmt;
2959 int setting;
2961 tree local_decl;
2962 tree subblock;
2964 BLOCK_ABSTRACT (stmt) = setting;
2966 for (local_decl = BLOCK_VARS (stmt);
2967 local_decl != NULL_TREE;
2968 local_decl = TREE_CHAIN (local_decl))
2969 set_decl_abstract_flags (local_decl, setting);
2971 for (subblock = BLOCK_SUBBLOCKS (stmt);
2972 subblock != NULL_TREE;
2973 subblock = BLOCK_CHAIN (subblock))
2974 set_block_abstract_flags (subblock, setting);
2977 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2978 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2979 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2980 set the abstract flags for all of the parameters, local vars, local
2981 blocks and sub-blocks (recursively) to the same setting. */
2983 void
2984 set_decl_abstract_flags (decl, setting)
2985 tree decl;
2986 int setting;
2988 DECL_ABSTRACT (decl) = setting;
2989 if (TREE_CODE (decl) == FUNCTION_DECL)
2991 tree arg;
2993 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2994 DECL_ABSTRACT (arg) = setting;
2995 if (DECL_INITIAL (decl) != NULL_TREE
2996 && DECL_INITIAL (decl) != error_mark_node)
2997 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3001 /* Output the assembly language code for the function FNDECL
3002 from its DECL_SAVED_INSNS. Used for inline functions that are output
3003 at end of compilation instead of where they came in the source. */
3005 static GTY(()) struct function *old_cfun;
3007 void
3008 output_inline_function (fndecl)
3009 tree fndecl;
3011 enum debug_info_type old_write_symbols = write_symbols;
3012 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
3013 struct function *f = DECL_SAVED_INSNS (fndecl);
3015 old_cfun = cfun;
3016 cfun = f;
3017 current_function_decl = fndecl;
3019 set_new_last_label_num (f->inl_max_label_num);
3021 /* We're not deferring this any longer. */
3022 DECL_DEFER_OUTPUT (fndecl) = 0;
3024 /* If requested, suppress debugging information. */
3025 if (f->no_debugging_symbols)
3027 write_symbols = NO_DEBUG;
3028 debug_hooks = &do_nothing_debug_hooks;
3031 /* Make sure warnings emitted by the optimizers (e.g. control reaches
3032 end of non-void function) are not wildly incorrect. */
3033 input_filename = DECL_SOURCE_FILE (fndecl);
3034 lineno = DECL_SOURCE_LINE (fndecl);
3036 /* Compile this function all the way down to assembly code. As a
3037 side effect this destroys the saved RTL representation, but
3038 that's okay, because we don't need to inline this anymore. */
3039 rest_of_compilation (fndecl);
3040 DECL_INLINE (fndecl) = 0;
3042 cfun = old_cfun;
3043 current_function_decl = old_cfun ? old_cfun->decl : 0;
3044 write_symbols = old_write_symbols;
3045 debug_hooks = old_debug_hooks;
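/* For illustration, a standalone sketch (not GCC code; invented state)
   of the save/switch/restore pattern above: the globals that describe
   "the function being compiled" are saved, repointed at the deferred
   function for the duration of the work, then restored.  */
#if 0
#include <stdio.h>

struct toy_function { const char *name; };

static struct toy_function *toy_cfun;

static void
toy_compile (struct toy_function *f, void (*work) (void))
{
  struct toy_function *old_cfun = toy_cfun;

  toy_cfun = f;
  work ();
  toy_cfun = old_cfun;		/* restore, as cfun and friends are above */
}

static void
toy_work (void)
{
  printf ("compiling %s\n", toy_cfun->name);
}

int
main (void)
{
  struct toy_function f = { "deferred_inline" };

  toy_compile (&f, toy_work);
  return 0;
}
#endif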
3049 /* Functions to keep track of the values hard regs had at the start of
3050 the function. */
3053 get_hard_reg_initial_reg (fun, reg)
3054 struct function *fun;
3055 rtx reg;
3057 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3058 int i;
3060 if (ivs == 0)
3061 return NULL_RTX;
3063 for (i = 0; i < ivs->num_entries; i++)
3064 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3065 return ivs->entries[i].hard_reg;
3067 return NULL_RTX;
3071 has_func_hard_reg_initial_val (fun, reg)
3072 struct function *fun;
3073 rtx reg;
3075 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3076 int i;
3078 if (ivs == 0)
3079 return NULL_RTX;
3081 for (i = 0; i < ivs->num_entries; i++)
3082 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3083 return ivs->entries[i].pseudo;
3085 return NULL_RTX;
3089 get_func_hard_reg_initial_val (fun, reg)
3090 struct function *fun;
3091 rtx reg;
3093 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3094 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3096 if (rv)
3097 return rv;
3099 if (ivs == 0)
3101 fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
3102 ivs = fun->hard_reg_initial_vals;
3103 ivs->num_entries = 0;
3104 ivs->max_entries = 5;
3105 ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
3108 if (ivs->num_entries >= ivs->max_entries)
3110 ivs->max_entries += 5;
3111 ivs->entries =
3112 (initial_value_pair *) ggc_realloc (ivs->entries,
3113 ivs->max_entries
3114 * sizeof (initial_value_pair));
3117 ivs->entries[ivs->num_entries].hard_reg = reg;
3118 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3120 return ivs->entries[ivs->num_entries++].pseudo;
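/* For illustration, a standalone sketch (not GCC code) of the table
   discipline above: linear search for an existing entry, lazy
   allocation, growth by a fixed increment of five, then append.
   malloc/realloc stand in for ggc_alloc/ggc_realloc.  */
#if 0
#include <stdlib.h>

struct toy_pair { int hard_reg; int pseudo; };

struct toy_ivs
{
  int num_entries;
  int max_entries;
  struct toy_pair *entries;
};

static int
toy_get_initial_val (struct toy_ivs **ivsp, int hard_reg, int new_pseudo)
{
  struct toy_ivs *ivs = *ivsp;
  int i;

  if (ivs)
    for (i = 0; i < ivs->num_entries; i++)
      if (ivs->entries[i].hard_reg == hard_reg)
	return ivs->entries[i].pseudo;	/* reuse the existing pseudo */

  if (ivs == NULL)
    {
      ivs = (struct toy_ivs *) malloc (sizeof *ivs);
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries
	= (struct toy_pair *) malloc (5 * sizeof (struct toy_pair));
      *ivsp = ivs;
    }
  else if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries
	= (struct toy_pair *) realloc (ivs->entries,
				       ivs->max_entries
				       * sizeof (struct toy_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = hard_reg;
  ivs->entries[ivs->num_entries].pseudo = new_pseudo;
  return ivs->entries[ivs->num_entries++].pseudo;
}

int
main (void)
{
  struct toy_ivs *ivs = NULL;
  int p1 = toy_get_initial_val (&ivs, 4, 70);
  int p2 = toy_get_initial_val (&ivs, 4, 71);	/* finds the first entry */

  return p1 == 70 && p2 == 70 ? 0 : 1;
}
#endif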
3124 get_hard_reg_initial_val (mode, regno)
3125 enum machine_mode mode;
3126 int regno;
3128 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3132 has_hard_reg_initial_val (mode, regno)
3133 enum machine_mode mode;
3134 int regno;
3136 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3139 static void
3140 setup_initial_hard_reg_value_integration (inl_f, remap)
3141 struct function *inl_f;
3142 struct inline_remap *remap;
3144 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3145 int i;
3147 if (ivs == 0)
3148 return;
3150 for (i = 0; i < ivs->num_entries; i ++)
3151 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3152 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3156 void
3157 emit_initial_value_sets ()
3159 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3160 int i;
3161 rtx seq;
3163 if (ivs == 0)
3164 return;
3166 start_sequence ();
3167 for (i = 0; i < ivs->num_entries; i++)
3168 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3169 seq = get_insns ();
3170 end_sequence ();
3172 emit_insn_after (seq, get_insns ());
3175 /* If the backend knows where to allocate pseudos for hard
3176 register initial values, register these allocations now. */
3177 void
3178 allocate_initial_values (reg_equiv_memory_loc)
3179 rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3181 #ifdef ALLOCATE_INITIAL_VALUE
3182 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3183 int i;
3185 if (ivs == 0)
3186 return;
3188 for (i = 0; i < ivs->num_entries; i++)
3190 int regno = REGNO (ivs->entries[i].pseudo);
3191 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3193 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3194 ; /* Do nothing. */
3195 else if (GET_CODE (x) == MEM)
3196 reg_equiv_memory_loc[regno] = x;
3197 else if (GET_CODE (x) == REG)
3199 reg_renumber[regno] = REGNO (x);
3200 /* Poke the regno right into regno_reg_rtx
3201 so that even fixed regs are accepted. */
3202 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3204 else abort ();
3206 #endif
3209 #include "gt-integrate.h"